From e219ad401cf892a1a42aa26d732dc86b34232fcd Mon Sep 17 00:00:00 2001
From: Yonik Seeley
Date: Mon, 27 Jun 2011 17:52:02 +0000
Subject: [PATCH] SOLR-2429: ability to not cache filters and post filtering

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1140252 13f79535-47bb-0310-9956-ffa450edef68
---
 solr/CHANGES.txt | 9 +
 .../solr/common/params/CommonParams.java | 11 +
 .../handler/component/QueryComponent.java | 1 +
 .../solr/search/BoostQParserPlugin.java | 4 +-
 .../solr/search/DelegatingCollector.java | 75 +++
 .../org/apache/solr/search/DisMaxQParser.java | 6 +-
 .../org/apache/solr/search/ExtendedQuery.java | 40 ++
 .../apache/solr/search/ExtendedQueryBase.java | 74 +++
 .../search/FunctionRangeQParserPlugin.java | 54 +-
 .../java/org/apache/solr/search/Grouping.java | 13 +-
 .../org/apache/solr/search/PostFilter.java | 47 ++
 .../java/org/apache/solr/search/QParser.java | 29 ++
 .../org/apache/solr/search/QueryUtils.java | 12 +
 .../solr/search/SolrConstantScoreQuery.java | 34 +-
 .../apache/solr/search/SolrIndexSearcher.java | 479 +++++++++++++-----
 .../org/apache/solr/search/WrappedQuery.java | 96 ++++
 .../solr/search/function/DocValues.java | 2 +-
 .../solr/search/function/ValueSource.java | 61 ---
 .../function/ValueSourceRangeFilter.java | 21 +
 .../search/function/ValueSourceScorer.java | 85 ++++
 .../org/apache/solr/SolrTestCaseJ4.java | 1 +
 .../org/apache/solr/search/TestFiltering.java | 322 ++++++++++++
 .../apache/solr/search/TestQueryTypes.java | 10 +
 .../apache/solr/search/TestSearchPerf.java | 8 +-
 24 files changed, 1297 insertions(+), 197 deletions(-)
 create mode 100644 solr/src/java/org/apache/solr/search/DelegatingCollector.java
 create mode 100644 solr/src/java/org/apache/solr/search/ExtendedQuery.java
 create mode 100644 solr/src/java/org/apache/solr/search/ExtendedQueryBase.java
 create mode 100644 solr/src/java/org/apache/solr/search/PostFilter.java
 create mode 100644 solr/src/java/org/apache/solr/search/WrappedQuery.java
 create mode 100644 solr/src/java/org/apache/solr/search/function/ValueSourceScorer.java
 create mode 100644 solr/src/test/org/apache/solr/search/TestFiltering.java

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index c7d00cf41e5..9310c59a693 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -257,6 +257,15 @@ New Features
 * LUCENE-3234: add a new parameter hl.phraseLimit for FastVectorHighlighter speed up. (Mike Sokolov via koji)
+* SOLR-2429: Ability to add cache=false to queries and query filters to avoid
+  using the filterCache or queryCache. A cost may also be specified and is used
+  to order the evaluation of non-cached filters from least to greatest cost.
+  For very expensive query filters (cost >= 100), if the query implements
+  the PostFilter interface, it will be used to obtain a Collector that is
+  checked only for documents that match the main query and all other filters.
+  The "frange" query now implements the PostFilter interface.
(yonik) + + Optimizations ---------------------- diff --git a/solr/src/common/org/apache/solr/common/params/CommonParams.java b/solr/src/common/org/apache/solr/common/params/CommonParams.java index c981c4ce9ff..bd047757de5 100755 --- a/solr/src/common/org/apache/solr/common/params/CommonParams.java +++ b/solr/src/common/org/apache/solr/common/params/CommonParams.java @@ -153,5 +153,16 @@ public interface CommonParams { public static final String THREADS = "threads"; public static final String TRUE = Boolean.TRUE.toString(); public static final String FALSE = Boolean.FALSE.toString(); + + /** Used as a local parameter on queries. cache=false means don't check any query or filter caches. + * cache=true is the default. + */ + public static final String CACHE = "cache"; + + /** Used as a local param on filter queries in conjunction with cache=false. Filters are checked in order, from + * smallest cost to largest. If cost>=100 and the query implements PostFilter, then that interface will be used to do post query filtering. + */ + public static final String COST = "cost"; + } diff --git a/solr/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/src/java/org/apache/solr/handler/component/QueryComponent.java index 1e2dead111c..91b884c7536 100644 --- a/solr/src/java/org/apache/solr/handler/component/QueryComponent.java +++ b/solr/src/java/org/apache/solr/handler/component/QueryComponent.java @@ -287,6 +287,7 @@ public class QueryComponent extends SearchComponent DocListAndSet res = new DocListAndSet(); res.docList = new DocSlice(0, docs, luceneIds, null, docs, 0); if (rb.isNeedDocSet()) { + // TODO: create a cache for this! List queries = new ArrayList(); queries.add(rb.getQuery()); List filters = rb.getFilters(); diff --git a/solr/src/java/org/apache/solr/search/BoostQParserPlugin.java b/solr/src/java/org/apache/solr/search/BoostQParserPlugin.java index 334f1eed83c..4cf5aaa94d9 100755 --- a/solr/src/java/org/apache/solr/search/BoostQParserPlugin.java +++ b/solr/src/java/org/apache/solr/search/BoostQParserPlugin.java @@ -54,10 +54,10 @@ public class BoostQParserPlugin extends QParserPlugin { public Query parse() throws ParseException { b = localParams.get(BOOSTFUNC); baseParser = subQuery(localParams.get(QueryParsing.V), null); - Query q = baseParser.parse(); + Query q = baseParser.getQuery(); if (b == null) return q; - Query bq = subQuery(b, FunctionQParserPlugin.NAME).parse(); + Query bq = subQuery(b, FunctionQParserPlugin.NAME).getQuery(); if (bq instanceof FunctionQuery) { vs = ((FunctionQuery)bq).getValueSource(); } else { diff --git a/solr/src/java/org/apache/solr/search/DelegatingCollector.java b/solr/src/java/org/apache/solr/search/DelegatingCollector.java new file mode 100644 index 00000000000..83cfac7b7a8 --- /dev/null +++ b/solr/src/java/org/apache/solr/search/DelegatingCollector.java @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.search; + + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.Scorer; + +import java.io.IOException; + + +/** A simple delegating collector where one can set the delegate after creation */ +public class DelegatingCollector extends Collector { + static int setLastDelegateCount; // for testing purposes only to determine the number of times a delegating collector chain was used + + protected Collector delegate; + protected Scorer scorer; + protected IndexReader.AtomicReaderContext context; + protected int docBase; + + public Collector getDelegate() { + return delegate; + } + + public void setDelegate(Collector delegate) { + this.delegate = delegate; + } + + /** Sets the last delegate in a chain of DelegatingCollectors */ + public void setLastDelegate(Collector delegate) { + DelegatingCollector ptr = this; + for(; ptr.getDelegate() instanceof DelegatingCollector; ptr = (DelegatingCollector)ptr.getDelegate()); + ptr.setDelegate(delegate); + setLastDelegateCount++; + } + + @Override + public void setScorer(Scorer scorer) throws IOException { + this.scorer = scorer; + delegate.setScorer(scorer); + } + + @Override + public void collect(int doc) throws IOException { + delegate.collect(doc); + } + + @Override + public void setNextReader(IndexReader.AtomicReaderContext context) throws IOException { + this.context = context; + this.docBase = context.docBase; + delegate.setNextReader(context); + } + + @Override + public boolean acceptsDocsOutOfOrder() { + return delegate.acceptsDocsOutOfOrder(); + } +} diff --git a/solr/src/java/org/apache/solr/search/DisMaxQParser.java b/solr/src/java/org/apache/solr/search/DisMaxQParser.java index 483340509d3..965fe9961de 100644 --- a/solr/src/java/org/apache/solr/search/DisMaxQParser.java +++ b/solr/src/java/org/apache/solr/search/DisMaxQParser.java @@ -106,7 +106,7 @@ public class DisMaxQParser extends QParser { if (null == boostFunc || "".equals(boostFunc)) continue; Map ff = SolrPluginUtils.parseFieldBoosts(boostFunc); for (String f : ff.keySet()) { - Query fq = subQuery(f, FunctionQParserPlugin.NAME).parse(); + Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery(); Float b = ff.get(f); if (null != b) { fq.setBoost(b); @@ -125,7 +125,7 @@ public class DisMaxQParser extends QParser { boostQueries = new ArrayList(); for (String qs : boostParams) { if (qs.trim().length() == 0) continue; - Query q = subQuery(qs, null).parse(); + Query q = subQuery(qs, null).getQuery(); boostQueries.add(q); } } @@ -190,7 +190,7 @@ public class DisMaxQParser extends QParser { String altQ = solrParams.get(DisMaxParams.ALTQ); if (altQ != null) { QParser altQParser = subQuery(altQ, null); - return altQParser.parse(); + return altQParser.getQuery(); } else { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "missing query string"); } diff --git a/solr/src/java/org/apache/solr/search/ExtendedQuery.java b/solr/src/java/org/apache/solr/search/ExtendedQuery.java new file mode 100644 index 00000000000..92a217c1367 --- /dev/null +++ 
b/solr/src/java/org/apache/solr/search/ExtendedQuery.java @@ -0,0 +1,40 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.search; + +/** The ExtendedQuery interface provides extra metadata to a query. + * Implementations of ExtendedQuery must also extend Query. + */ +public interface ExtendedQuery { + /** Should this query be cached in the query cache or filter cache. */ + public boolean getCache(); + + public void setCache(boolean cache); + + /** Returns the cost of this query, used to order checking of filters that are not cached. + * If getCache()==false && getCost()>=100 && this instanceof PostFilter, then + * the PostFilter interface will be used for filtering. + */ + public int getCost(); + + public void setCost(int cost); + + /** If true, the clauses of this boolean query should be cached separately. This is not yet implemented. */ + public boolean getCacheSep(); + public void setCacheSep(boolean cacheSep); +} diff --git a/solr/src/java/org/apache/solr/search/ExtendedQueryBase.java b/solr/src/java/org/apache/solr/search/ExtendedQueryBase.java new file mode 100644 index 00000000000..48e31ee8f38 --- /dev/null +++ b/solr/src/java/org/apache/solr/search/ExtendedQueryBase.java @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.search; + +import org.apache.lucene.search.Query; + +public class ExtendedQueryBase extends Query implements ExtendedQuery { + private int cost; + private boolean cache = true; + private boolean cacheSep; + + @Override + public void setCache(boolean cache) { + this.cache = cache; + } + + @Override + public boolean getCache() { + return cache; + } + + @Override + public void setCacheSep(boolean cacheSep) { + this.cacheSep = cacheSep; + } + + @Override + public boolean getCacheSep() { + return cacheSep; + } + + @Override + public void setCost(int cost) { + this.cost = cost; + } + + public int getCost() { + return cost; + } + + public String getOptions() { + StringBuilder sb = new StringBuilder(); + if (!cache) { + sb.append("{!cache=false"); + sb.append(" cost="); + sb.append(cost); + sb.append("}"); + } else if (cacheSep) { + sb.append("{!cache=sep"); + sb.append("}"); + } + return sb.toString(); + } + + @Override + public String toString(String field) { + return getOptions(); + } +} diff --git a/solr/src/java/org/apache/solr/search/FunctionRangeQParserPlugin.java b/solr/src/java/org/apache/solr/search/FunctionRangeQParserPlugin.java index 2283e494ffd..c87281f1343 100755 --- a/solr/src/java/org/apache/solr/search/FunctionRangeQParserPlugin.java +++ b/solr/src/java/org/apache/solr/search/FunctionRangeQParserPlugin.java @@ -16,13 +16,18 @@ */ package org.apache.solr.search; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.queryParser.ParseException; -import org.apache.lucene.search.Query; +import org.apache.lucene.search.*; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.SolrConfig; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.function.*; +import java.io.IOException; +import java.util.Map; + /** * Create a range query over a function. *
Other parameters: @@ -48,7 +53,7 @@ public class FunctionRangeQParserPlugin extends QParserPlugin { @Override public Query parse() throws ParseException { funcStr = localParams.get(QueryParsing.V, null); - Query funcQ = subQuery(funcStr, FunctionQParserPlugin.NAME).parse(); + Query funcQ = subQuery(funcStr, FunctionQParserPlugin.NAME).getQuery(); if (funcQ instanceof FunctionQuery) { vs = ((FunctionQuery)funcQ).getValueSource(); } else { @@ -62,10 +67,51 @@ public class FunctionRangeQParserPlugin extends QParserPlugin { // TODO: add a score=val option to allow score to be the value ValueSourceRangeFilter rf = new ValueSourceRangeFilter(vs, l, u, includeLower, includeUpper); - SolrConstantScoreQuery csq = new SolrConstantScoreQuery(rf); - return csq; + FunctionRangeQuery frq = new FunctionRangeQuery(rf); + return frq; } }; } } + +// This class works as either a normal constant score query, or as a PostFilter using a collector +class FunctionRangeQuery extends SolrConstantScoreQuery implements PostFilter { + final ValueSourceRangeFilter rangeFilt; + + public FunctionRangeQuery(ValueSourceRangeFilter filter) { + super(filter); + this.rangeFilt = filter; + } + + @Override + public DelegatingCollector getFilterCollector(IndexSearcher searcher) { + Map fcontext = ValueSource.newContext(searcher); + return new FunctionRangeCollector(fcontext); + } + + class FunctionRangeCollector extends DelegatingCollector { + final Map fcontext; + ValueSourceScorer scorer; + int maxdoc; + + public FunctionRangeCollector(Map fcontext) { + this.fcontext = fcontext; + } + + @Override + public void collect(int doc) throws IOException { + if (doc + * The filtering mechanism used is a {@link DelegatingCollector} + * that allows the filter to not call the delegate for certain documents, + * thus effectively filtering them out. This also avoids the normal + * filter advancing mechanism which asks for the first acceptable document on + * or after the target (which is undesirable for expensive filters). + * This collector interface also enables better performance when an external system + * must be consulted, since document ids may be buffered and batched into + * a single request to the external system. + *

+ * Implementations of this interface must also be a Query. + * If an implementation can only support the collector method of + * filtering through getFilterCollector, then ExtendedQuery.getCached() + * should always return false, and ExtendedQuery.getCost() should + * return no less than 100. + * + * @see ExtendedQueryBase + */ +public interface PostFilter extends ExtendedQuery { + + /** Returns a DelegatingCollector to be run after the main query and all of it's filters, but before any sorting or grouping collectors */ + public DelegatingCollector getFilterCollector(IndexSearcher searcher); +} diff --git a/solr/src/java/org/apache/solr/search/QParser.java b/solr/src/java/org/apache/solr/search/QParser.java index 9ae8f4dbd93..50504648baa 100755 --- a/solr/src/java/org/apache/solr/search/QParser.java +++ b/solr/src/java/org/apache/solr/search/QParser.java @@ -141,10 +141,39 @@ public abstract class QParser { public Query getQuery() throws ParseException { if (query==null) { query=parse(); + + if (localParams != null) { + String cacheStr = localParams.get(CommonParams.CACHE); + if (cacheStr != null) { + if (CommonParams.FALSE.equals(cacheStr)) { + extendedQuery().setCache(false); + } else if (CommonParams.TRUE.equals(cacheStr)) { + extendedQuery().setCache(true); + } else if ("sep".equals(cacheStr)) { + extendedQuery().setCacheSep(true); + } + } + + int cost = localParams.getInt(CommonParams.COST, Integer.MIN_VALUE); + if (cost != Integer.MIN_VALUE) { + extendedQuery().setCost(cost); + } + } } return query; } + // returns an extended query (and sets "query" to a new wrapped query if necessary) + private ExtendedQuery extendedQuery() { + if (query instanceof ExtendedQuery) { + return (ExtendedQuery)query; + } else { + WrappedQuery wq = new WrappedQuery(query); + query = wq; + return wq; + } + } + private void checkRecurse() throws ParseException { if (recurseCount++ >= 100) { throw new ParseException("Infinite Recursion detected parsing query '" + qstr + "'"); diff --git a/solr/src/java/org/apache/solr/search/QueryUtils.java b/solr/src/java/org/apache/solr/search/QueryUtils.java index f8cd6e9e4d6..d315f98b6a1 100755 --- a/solr/src/java/org/apache/solr/search/QueryUtils.java +++ b/solr/src/java/org/apache/solr/search/QueryUtils.java @@ -52,6 +52,15 @@ public class QueryUtils { * @return */ static Query getAbs(Query q) { + if (q instanceof WrappedQuery) { + Query subQ = ((WrappedQuery)q).getWrappedQuery(); + Query absSubQ = getAbs(subQ); + if (absSubQ == subQ) return q; + WrappedQuery newQ = (WrappedQuery)q.clone(); + newQ.setWrappedQuery(absSubQ); + return newQ; + } + if (!(q instanceof BooleanQuery)) return q; BooleanQuery bq = (BooleanQuery)q; @@ -87,6 +96,9 @@ public class QueryUtils { * lucene. */ static Query makeQueryable(Query q) { + if (q instanceof WrappedQuery) { + return makeQueryable(((WrappedQuery)q).getWrappedQuery()); + } return isNegative(q) ? fixNegativeQuery(q) : q; } diff --git a/solr/src/java/org/apache/solr/search/SolrConstantScoreQuery.java b/solr/src/java/org/apache/solr/search/SolrConstantScoreQuery.java index 357ee668d4d..69bf4b4881d 100755 --- a/solr/src/java/org/apache/solr/search/SolrConstantScoreQuery.java +++ b/solr/src/java/org/apache/solr/search/SolrConstantScoreQuery.java @@ -34,7 +34,9 @@ import java.util.Map; * * Experimental and subject to change. 
*/ -public class SolrConstantScoreQuery extends ConstantScoreQuery { +public class SolrConstantScoreQuery extends ConstantScoreQuery implements ExtendedQuery { + boolean cache = true; // cache by default + int cost; public SolrConstantScoreQuery(Filter filter) { super(filter); @@ -46,6 +48,36 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery { return filter; } + @Override + public void setCache(boolean cache) { + this.cache = cache; + } + + @Override + public boolean getCache() { + return cache; + } + + @Override + public void setCacheSep(boolean cacheSep) { + } + + @Override + public boolean getCacheSep() { + return false; + } + + @Override + public void setCost(int cost) { + this.cost = cost; + } + + @Override + public int getCost() { + return cost; + } + + @Override public Query rewrite(IndexReader reader) throws IOException { return this; diff --git a/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java index 2094cc8cd29..54c93818864 100644 --- a/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -542,6 +542,17 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { * The DocSet returned should not be modified. */ public DocSet getDocSet(Query query) throws IOException { + if (query instanceof ExtendedQuery) { + ExtendedQuery eq = (ExtendedQuery)query; + if (!eq.getCache()) { + if (query instanceof WrappedQuery) { + query = ((WrappedQuery)query).getWrappedQuery(); + } + query = QueryUtils.makeQueryable(query); + return getDocSetNC(query, null); + } + } + // Get the absolute value (positive version) of this query. If we // get back the same reference, we know it's positive. Query absQ = QueryUtils.getAbs(query); @@ -574,12 +585,29 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { if (answer!=null) return answer; } answer = getDocSetNC(q,null); - if (filterCache != null) filterCache.put(q,answer); + if (filterCache != null) filterCache.put( + q,answer); return answer; } private static Query matchAllDocsQuery = new MatchAllDocsQuery(); + + static class ProcessedFilter { + DocSet answer; // the answer, if non-null + Filter filter; + DelegatingCollector postFilter; + } + + + private static Comparator sortByCost = new Comparator() { + @Override + public int compare(Query q1, Query q2) { + return ((ExtendedQuery)q1).getCost() - ((ExtendedQuery)q2).getCost(); + } + }; + + /** * Returns the set of document ids matching all queries. * This method is cache-aware and attempts to retrieve the answer from the cache if possible. @@ -589,123 +617,161 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { * The DocSet returned should not be modified. 
*/ public DocSet getDocSet(List queries) throws IOException { - if (queries==null) return null; - if (queries.size()==1) return getDocSet(queries.get(0)); - DocSet answer=null; + ProcessedFilter pf = getProcessedFilter(null, queries); + if (pf.answer != null) return pf.answer; - boolean[] neg = new boolean[queries.size()]; - DocSet[] sets = new DocSet[queries.size()]; - int smallestIndex = -1; - int smallestCount = Integer.MAX_VALUE; - for (int i=0; i>6, maxDoc()); + Collector collector = setCollector; + if (pf.postFilter != null) { + pf.postFilter.setLastDelegate(collector); + collector = pf.postFilter; + } + + final AtomicReaderContext[] leaves = leafContexts; + + + for (int i=0; i queries) throws IOException { + ProcessedFilter pf = new ProcessedFilter(); + if (queries==null || queries.size()==0) { + if (setFilter != null) + pf.filter = setFilter.getTopFilter(); + return pf; + } + + DocSet answer=null; + + boolean[] neg = new boolean[queries.size()+1]; + DocSet[] sets = new DocSet[queries.size()+1]; + List notCached = null; + List postFilters = null; + + int end = 0; + int smallestIndex = -1; + + if (setFilter != null) { + answer = sets[end++] = setFilter; + smallestIndex = end; + } + + int smallestCount = Integer.MAX_VALUE; + for (Query q : queries) { + if (q instanceof ExtendedQuery) { + ExtendedQuery eq = (ExtendedQuery)q; + if (!eq.getCache()) { + if (eq.getCost() >= 100 && eq instanceof PostFilter) { + if (postFilters == null) postFilters = new ArrayList(sets.length-end); + postFilters.add(q); + } else { + if (notCached == null) notCached = new ArrayList(sets.length-end); + notCached.add(q); + } + continue; + } + } + + Query posQuery = QueryUtils.getAbs(q); + sets[end] = getPositiveDocSet(posQuery); + // Negative query if absolute value different from original + if (q==posQuery) { + neg[end] = false; + // keep track of the smallest positive set. + // This optimization is only worth it if size() is cached, which it would + // be if we don't do any set operations. + int sz = sets[end].size(); + if (sz 0 && answer==null) { + answer = getPositiveDocSet(matchAllDocsQuery); + } // do negative queries first to shrink set size - for (int i=0; i queries) throws IOException { - Filter answer = setFilter == null ? null : setFilter.getTopFilter(); - - if (queries == null || queries.size() == 0) { - return answer; - } - - if (answer == null && queries.size() == 1) { - return getFilter(queries.get(0)); - } - - - DocSet finalSet=null; - - int nDocSets =0; - boolean[] neg = new boolean[queries.size()]; - DocSet[] sets = new DocSet[queries.size()]; - Query[] nocache = new Query[queries.size()]; - - int smallestIndex = -1; - int smallestCount = Integer.MAX_VALUE; - for (Query q : queries) { - // if (q instanceof) - - - Query posQuery = QueryUtils.getAbs(q); - sets[nDocSets] = getPositiveDocSet(posQuery); - // Negative query if absolute value different from original - if (q==posQuery) { - neg[nDocSets] = false; - // keep track of the smallest positive set. - // This optimization is only worth it if size() is cached, which it would - // be if we don't do any set operations. 
- int sz = sets[nDocSets].size(); - if (sz weights = new ArrayList(notCached.size()); + for (Query q : notCached) { + Query qq = QueryUtils.makeQueryable(q); + weights.add(createNormalizedWeight(qq)); + } + pf.filter = new FilterImpl(answer, weights); + } else { + if (postFilters == null) { + if (answer == null) { + answer = getPositiveDocSet(matchAllDocsQuery); } - } else { - neg[nDocSets] = true; + // "answer" is the only part of the filter, so set it. + pf.answer = answer; } - nDocSets++; + if (answer != null) { + pf.filter = answer.getTopFilter(); + } } - // if no positive queries, start off with all docs - if (finalSet==null) finalSet = getPositiveDocSet(matchAllDocsQuery); - - // do negative queries first to shrink set size - for (int i=0; i=0; i--) { + DelegatingCollector prev = pf.postFilter; + pf.postFilter = ((PostFilter)postFilters.get(i)).getFilterCollector(this); + if (prev != null) pf.postFilter.setDelegate(prev); + } } - for (int i=0; i maxDoc()) maxDocRequested = maxDoc(); int supersetMaxDoc= maxDocRequested; - DocList superset; + DocList superset = null; + + int flags = cmd.getFlags(); + Query q = cmd.getQuery(); + if (q instanceof ExtendedQuery) { + ExtendedQuery eq = (ExtendedQuery)q; + if (!eq.getCache()) { + flags |= (NO_CHECK_QCACHE | NO_SET_QCACHE | NO_CHECK_FILTERCACHE); + } + } + // we can try and look up the complete query in the cache. // we can't do that if filter!=null though (we don't want to // do hashCode() and equals() for a big DocSet). - if (queryResultCache != null && cmd.getFilter()==null) { + if (queryResultCache != null && cmd.getFilter()==null + && (flags & (NO_CHECK_QCACHE|NO_SET_QCACHE)) != ((NO_CHECK_QCACHE|NO_SET_QCACHE))) + { // all of the current flags can be reused during warming, // so set all of them on the cache key. - key = new QueryResultKey(cmd.getQuery(), cmd.getFilterList(), cmd.getSort(), cmd.getFlags()); - if ((cmd.getFlags() & NO_CHECK_QCACHE)==0) { + key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags); + if ((flags & NO_CHECK_QCACHE)==0) { superset = queryResultCache.get(key); if (superset != null) { // check that the cache entry has scores recorded if we need them - if ((cmd.getFlags() & GET_SCORES)==0 || superset.hasScores()) { + if ((flags & GET_SCORES)==0 || superset.hasScores()) { // NOTE: subset() returns null if the DocList has fewer docs than // requested out.docList = superset.subset(cmd.getOffset(),cmd.getLen()); @@ -983,12 +1073,11 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { // found the docList in the cache... now check if we need the docset too. // OPT: possible future optimization - if the doclist contains all the matches, // use it to make the docset instead of rerunning the query. - if (out.docSet==null && ((cmd.getFlags() & GET_DOCSET)!=0) ) { + if (out.docSet==null && ((flags & GET_DOCSET)!=0) ) { if (cmd.getFilterList()==null) { out.docSet = getDocSet(cmd.getQuery()); } else { - List newList = new ArrayList(cmd.getFilterList() -.size()+1); + List newList = new ArrayList(cmd.getFilterList().size()+1); newList.add(cmd.getQuery()); newList.addAll(cmd.getFilterList()); out.docSet = getDocSet(newList); @@ -998,9 +1087,10 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { } } - // If we are going to generate the result, bump up to the - // next resultWindowSize for better caching. + // If we are going to generate the result, bump up to the + // next resultWindowSize for better caching. 
+ if ((flags & NO_SET_QCACHE) == 0) { // handle 0 special case as well as avoid idiv in the common case. if (maxDocRequested < queryResultWindowSize) { supersetMaxDoc=queryResultWindowSize; @@ -1008,6 +1098,9 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { supersetMaxDoc = ((maxDocRequested -1)/queryResultWindowSize + 1)*queryResultWindowSize; if (supersetMaxDoc < 0) supersetMaxDoc=maxDocRequested; } + } else { + key = null; // we won't be caching the result + } } @@ -1020,7 +1113,7 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { // check if we should try and use the filter cache boolean useFilterCache=false; - if ((cmd.getFlags() & (GET_SCORES|NO_CHECK_FILTERCACHE))==0 && useFilterForSortedQuery && cmd.getSort() != null && filterCache != null) { + if ((flags & (GET_SCORES|NO_CHECK_FILTERCACHE))==0 && useFilterForSortedQuery && cmd.getSort() != null && filterCache != null) { useFilterCache=true; SortField[] sfields = cmd.getSort().getSort(); for (SortField sf : sfields) { @@ -1049,7 +1142,7 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { } else { // do it the normal way... cmd.setSupersetMaxDoc(supersetMaxDoc); - if ((cmd.getFlags() & GET_DOCSET)!=0) { + if ((flags & GET_DOCSET)!=0) { // this currently conflates returning the docset for the base query vs // the base query and all filters. DocSet qDocSet = getDocListAndSetNC(qr,cmd); @@ -1059,8 +1152,10 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { getDocListNC(qr,cmd); //Parameters: cmd.getQuery(),theFilt,cmd.getSort(),0,supersetMaxDoc,cmd.getFlags(),cmd.getTimeAllowed(),responseHeader); } - superset = out.docList; - out.docList = superset.subset(cmd.getOffset(),cmd.getLen()); + if (key != null) { + superset = out.docList; + out.docList = superset.subset(cmd.getOffset(),cmd.getLen()); + } } // lastly, put the superset in the cache if the size is less than or equal @@ -1073,9 +1168,6 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { private void getDocListNC(QueryResult qr,QueryCommand cmd) throws IOException { - //Parameters: cmd.getQuery(),theFilt,cmd.getSort(),0,supersetMaxDoc,cmd.getFlags(),cmd.getTimeAllowed(),responseHeader); - //Query query, DocSet filter, Sort lsort, int offset, int len, int flags, long timeAllowed, NamedList responseHeader - DocSet filter = cmd.getFilter()!=null ? cmd.getFilter() : getDocSet(cmd.getFilterList()); final long timeAllowed = cmd.getTimeAllowed(); int len = cmd.getSupersetMaxDoc(); int last = len; @@ -1091,7 +1183,8 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { Query query = QueryUtils.makeQueryable(cmd.getQuery()); - final Filter luceneFilter = filter==null ? null : filter.getTopFilter(); + ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList()); + final Filter luceneFilter = pf.filter; // handle zero case... 
if (lastDocRequested<=0) { @@ -1143,6 +1236,11 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { if( timeAllowed > 0 ) { collector = new TimeLimitingCollector(collector, timeAllowed); } + if (pf.postFilter != null) { + pf.postFilter.setLastDelegate(collector); + collector = pf.postFilter; + } + try { super.search(query, luceneFilter, collector); } @@ -1167,6 +1265,10 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { if( timeAllowed > 0 ) { collector = new TimeLimitingCollector(collector, timeAllowed); } + if (pf.postFilter != null) { + pf.postFilter.setLastDelegate(collector); + collector = pf.postFilter; + } try { super.search(query, luceneFilter, collector); } @@ -1199,7 +1301,6 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { // be cached if desired. private DocSet getDocListAndSetNC(QueryResult qr,QueryCommand cmd) throws IOException { int len = cmd.getSupersetMaxDoc(); - DocSet filter = cmd.getFilter()!=null ? cmd.getFilter() : getDocSet(cmd.getFilterList()); int last = len; if (last < 0 || last > maxDoc()) last=maxDoc(); final int lastDocRequested = last; @@ -1214,11 +1315,12 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { int maxDoc = maxDoc(); int smallSetSize = maxDoc>>6; + ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList()); + final Filter luceneFilter = pf.filter; + Query query = QueryUtils.makeQueryable(cmd.getQuery()); final long timeAllowed = cmd.getTimeAllowed(); - final Filter luceneFilter = filter==null ? null : filter.getTopFilter(); - // handle zero case... if (lastDocRequested<=0) { final float[] topscore = new float[] { Float.NEGATIVE_INFINITY }; @@ -1253,6 +1355,11 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { if( timeAllowed > 0 ) { collector = new TimeLimitingCollector(collector, timeAllowed); } + if (pf.postFilter != null) { + pf.postFilter.setLastDelegate(collector); + collector = pf.postFilter; + } + try { super.search(query, luceneFilter, collector); } @@ -1284,6 +1391,10 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { if( timeAllowed > 0 ) { collector = new TimeLimitingCollector(collector, timeAllowed ); } + if (pf.postFilter != null) { + pf.postFilter.setLastDelegate(collector); + collector = pf.postFilter; + } try { super.search(query, luceneFilter, collector); } @@ -1320,7 +1431,7 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { // TODO: currently we don't generate the DocSet for the base query, // but the QueryDocSet == CompleteDocSet if filter==null. - return filter==null ? qr.getDocSet() : null; + return pf.filter==null && pf.postFilter==null ? qr.getDocSet() : null; } @@ -1933,3 +2044,133 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean { } +class FilterImpl extends Filter { + final DocSet filter; + final Filter topFilter; + final List weights; + + public FilterImpl(DocSet filter, List weights) { + this.filter = filter; + this.weights = weights; + this.topFilter = filter == null ? null : filter.getTopFilter(); + } + + @Override + public DocIdSet getDocIdSet(AtomicReaderContext context) throws IOException { + DocIdSet sub = topFilter == null ? 
null : topFilter.getDocIdSet(context); + if (weights.size() == 0) return sub; + return new FilterSet(sub, context); + } + + private class FilterSet extends DocIdSet { + DocIdSet docIdSet; + AtomicReaderContext context; + + public FilterSet(DocIdSet docIdSet, AtomicReaderContext context) { + this.docIdSet = docIdSet; + this.context = context; + } + + @Override + public DocIdSetIterator iterator() throws IOException { + List iterators = new ArrayList(weights.size()+1); + if (docIdSet != null) { + DocIdSetIterator iter = docIdSet.iterator(); + if (iter == null) return null; + iterators.add(iter); + } + for (Weight w : weights) { + Scorer scorer = w.scorer(context, Weight.ScorerContext.def()); + if (scorer == null) return null; + iterators.add(scorer); + } + if (iterators.size()==0) return null; + if (iterators.size()==1) return iterators.get(0); + if (iterators.size()==2) return new DualFilterIterator(iterators.get(0), iterators.get(1)); + return new FilterIterator(iterators.toArray(new DocIdSetIterator[iterators.size()])); + } + } + + private static class FilterIterator extends DocIdSetIterator { + final DocIdSetIterator[] iterators; + final DocIdSetIterator first; + + public FilterIterator(DocIdSetIterator[] iterators) { + this.iterators = iterators; + this.first = iterators[0]; + } + + @Override + public int docID() { + return first.docID(); + } + + private int doNext(int doc) throws IOException { + int which=0; // index of the iterator with the highest id + int i=1; + outer: for(;;) { + for (; i terms) { + q.extractTerms(terms); + } + + @Override + public Object clone() { + WrappedQuery newQ = (WrappedQuery)super.clone(); + newQ.q = (Query) q.clone(); + return newQ; + } + + @Override + public int hashCode() { + return q.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof WrappedQuery) { + return this.q.equals(((WrappedQuery)obj).q); + } + return q.equals(obj); + } + + @Override + public String toString(String field) { + return getOptions() + q.toString(); + } +} + diff --git a/solr/src/java/org/apache/solr/search/function/DocValues.java b/solr/src/java/org/apache/solr/search/function/DocValues.java index 889ef517948..eeafe802d2c 100644 --- a/solr/src/java/org/apache/solr/search/function/DocValues.java +++ b/solr/src/java/org/apache/solr/search/function/DocValues.java @@ -136,7 +136,7 @@ public abstract class DocValues { // A RangeValueSource can't easily be a ValueSource that takes another ValueSource // because it needs different behavior depending on the type of fields. There is also // a setup cost - parsing and normalizing params, and doing a binary search on the StringIndex. 
- + // TODO: change "reader" to AtomicReaderContext public ValueSourceScorer getRangeScorer(IndexReader reader, String lowerVal, String upperVal, boolean includeLower, boolean includeUpper) { float lower; float upper; diff --git a/solr/src/java/org/apache/solr/search/function/ValueSource.java b/solr/src/java/org/apache/solr/search/function/ValueSource.java index 3230ad8029f..a4026479046 100644 --- a/solr/src/java/org/apache/solr/search/function/ValueSource.java +++ b/solr/src/java/org/apache/solr/search/function/ValueSource.java @@ -193,64 +193,3 @@ public abstract class ValueSource implements Serializable { } -class ValueSourceScorer extends Scorer { - protected IndexReader reader; - private int doc = -1; - protected final int maxDoc; - protected final DocValues values; - protected boolean checkDeletes; - private final Bits delDocs; - - protected ValueSourceScorer(IndexReader reader, DocValues values) { - super(null); - this.reader = reader; - this.maxDoc = reader.maxDoc(); - this.values = values; - setCheckDeletes(true); - this.delDocs = MultiFields.getDeletedDocs(reader); - } - - public IndexReader getReader() { - return reader; - } - - public void setCheckDeletes(boolean checkDeletes) { - this.checkDeletes = checkDeletes && reader.hasDeletions(); - } - - public boolean matches(int doc) { - return (!checkDeletes || !delDocs.get(doc)) && matchesValue(doc); - } - - public boolean matchesValue(int doc) { - return true; - } - - @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() throws IOException { - for (; ;) { - doc++; - if (doc >= maxDoc) return doc = NO_MORE_DOCS; - if (matches(doc)) return doc; - } - } - - @Override - public int advance(int target) throws IOException { - // also works fine when target==NO_MORE_DOCS - doc = target - 1; - return nextDoc(); - } - - @Override - public float score() throws IOException { - return values.floatVal(doc); - } -} - - diff --git a/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java b/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java index d17d7d1db28..5040a5246e0 100755 --- a/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java +++ b/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java @@ -49,6 +49,27 @@ public class ValueSourceRangeFilter extends SolrFilter { this.includeUpper = upperVal != null && includeUpper; } + public ValueSource getValueSource() { + return valueSource; + } + + public String getLowerVal() { + return lowerVal; + } + + public String getUpperVal() { + return upperVal; + } + + public boolean isIncludeLower() { + return includeLower; + } + + public boolean isIncludeUpper() { + return includeUpper; + } + + @Override public DocIdSet getDocIdSet(final Map context, final AtomicReaderContext readerContext) throws IOException { return new DocIdSet() { diff --git a/solr/src/java/org/apache/solr/search/function/ValueSourceScorer.java b/solr/src/java/org/apache/solr/search/function/ValueSourceScorer.java new file mode 100644 index 00000000000..7a338520fbf --- /dev/null +++ b/solr/src/java/org/apache/solr/search/function/ValueSourceScorer.java @@ -0,0 +1,85 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.search.function; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.MultiFields; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.util.Bits; + +import java.io.IOException; + +public class ValueSourceScorer extends Scorer { + protected IndexReader reader; + private int doc = -1; + protected final int maxDoc; + protected final DocValues values; + protected boolean checkDeletes; + private final Bits delDocs; + + protected ValueSourceScorer(IndexReader reader, DocValues values) { + super(null); + this.reader = reader; + this.maxDoc = reader.maxDoc(); + this.values = values; + setCheckDeletes(true); + this.delDocs = MultiFields.getDeletedDocs(reader); + } + + public IndexReader getReader() { + return reader; + } + + public void setCheckDeletes(boolean checkDeletes) { + this.checkDeletes = checkDeletes && reader.hasDeletions(); + } + + public boolean matches(int doc) { + return (!checkDeletes || !delDocs.get(doc)) && matchesValue(doc); + } + + public boolean matchesValue(int doc) { + return true; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + for (; ;) { + doc++; + if (doc >= maxDoc) return doc = NO_MORE_DOCS; + if (matches(doc)) return doc; + } + } + + @Override + public int advance(int target) throws IOException { + // also works fine when target==NO_MORE_DOCS + doc = target - 1; + return nextDoc(); + } + + @Override + public float score() throws IOException { + return values.floatVal(doc); + } +} diff --git a/solr/src/test-framework/org/apache/solr/SolrTestCaseJ4.java b/solr/src/test-framework/org/apache/solr/SolrTestCaseJ4.java index 9a5ce81827b..f97a09a367d 100755 --- a/solr/src/test-framework/org/apache/solr/SolrTestCaseJ4.java +++ b/solr/src/test-framework/org/apache/solr/SolrTestCaseJ4.java @@ -430,6 +430,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase { } for (String test : tests) { + if (test == null || test.length()==0) continue; String testJSON = test.replace('\'', '"'); try { diff --git a/solr/src/test/org/apache/solr/search/TestFiltering.java b/solr/src/test/org/apache/solr/search/TestFiltering.java new file mode 100644 index 00000000000..aebabeb333e --- /dev/null +++ b/solr/src/test/org/apache/solr/search/TestFiltering.java @@ -0,0 +1,322 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.search; + + +import org.apache.lucene.util.OpenBitSet; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrException; +import org.apache.solr.request.SolrQueryRequest; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.*; + +public class TestFiltering extends SolrTestCaseJ4 { + + @BeforeClass + public static void beforeTests() throws Exception { + initCore("solrconfig.xml","schema12.xml"); + } + + + public void testCaching() throws Exception { + assertU(adoc("id","4", "val_i","1")); + assertU(adoc("id","1", "val_i","2")); + assertU(adoc("id","3", "val_i","3")); + assertU(adoc("id","2", "val_i","4")); + assertU(commit()); + + int prevCount; + + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("q","*:*", "fq","{!frange l=2 u=3 cache=false cost=100}val_i") + ,"/response/numFound==2" + ); + assertEquals(1, DelegatingCollector.setLastDelegateCount - prevCount); + + // The exact same query the second time will be cached by the queryCache + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("q","*:*", "fq","{!frange l=2 u=3 cache=false cost=100}val_i") + ,"/response/numFound==2" + ); + assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); + + // cache is true by default + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("q","*:*", "fq","{!frange l=2 u=4}val_i") + ,"/response/numFound==3" + ); + assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); + + // default cost avoids post filtering + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("q","*:*", "fq","{!frange l=2 u=5 cache=false}val_i") + ,"/response/numFound==3" + ); + assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); + + + // now re-do the same tests w/ faceting on to get the full docset + + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=6 cache=false cost=100}val_i") + ,"/response/numFound==3" + ); + assertEquals(1, DelegatingCollector.setLastDelegateCount - prevCount); + + // since we need the docset and the filter was not cached, the collector will need to be used again + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=6 cache=false cost=100}val_i") + ,"/response/numFound==3" + ); + assertEquals(1, DelegatingCollector.setLastDelegateCount - prevCount); + + // cache is true by default + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=7}val_i") + ,"/response/numFound==3" + ); + assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); + + // default cost avoids post filtering + prevCount = DelegatingCollector.setLastDelegateCount; + assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=8 cache=false}val_i") + ,"/response/numFound==3" + ); + assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); + + + 
} + + + class Model { + int indexSize; + OpenBitSet answer; + OpenBitSet multiSelect; + OpenBitSet facetQuery; + + void clear() { + answer = new OpenBitSet(indexSize); + answer.set(0, indexSize); + + multiSelect = new OpenBitSet(indexSize); + multiSelect.set(0, indexSize); + + facetQuery = new OpenBitSet(indexSize); + facetQuery.set(0, indexSize); + } + } + + static String f = "val_i"; + + String frangeStr(boolean negative, int l, int u, boolean cache, int cost, boolean exclude) { + + String topLev=""; + if (!cache || exclude) { + topLev = "" + (cache || random.nextBoolean() ? " cache="+cache : "") + + (cost!=0 ? " cost="+cost : "") + + ((exclude) ? " tag=t" : ""); + } + + String ret = "{!frange v="+f+" l="+l+" u="+u; + if (negative) { + ret = "-_query_:\"" + ret + "}\""; + if (topLev.length()>0) { + ret = "{!" + topLev + "}" + ret; // add options at top level (can't be on frange) + } + } else { + ret += topLev + "}"; // add options right to frange + } + + return ret; + } + + String makeRandomQuery(Model model, boolean mainQuery, boolean facetQuery) { + + boolean cache = random.nextBoolean(); + int cost = cache ? 0 : random.nextBoolean() ? random.nextInt(200) : 0; + boolean positive = random.nextBoolean(); + boolean exclude = facetQuery ? false : random.nextBoolean(); // can't exclude a facet query from faceting + + OpenBitSet[] sets = facetQuery ? new OpenBitSet[]{model.facetQuery} : + (exclude ? new OpenBitSet[]{model.answer, model.facetQuery} : new OpenBitSet[]{model.answer, model.multiSelect, model.facetQuery}); + + if (random.nextInt(100) < 50) { + // frange + int l=0; + int u=0; + + if (positive) { + // positive frange, make it big by taking the max of 4 tries + int n=-1; + + for (int i=0; i<4; i++) { + int ll = random.nextInt(model.indexSize); + int uu = ll + ((ll==model.indexSize-1) ? 0 : random.nextInt(model.indexSize-l)); + if (uu-ll+1 > n) { + n = uu-ll+1; + u = uu; + l = ll; + } + } + + for (OpenBitSet set : sets) { + set.clear(0,l); + set.clear(u+1, model.indexSize); + } + } else { + // negative frange.. make it relatively small + l = random.nextInt(model.indexSize); + u = Math.max(model.indexSize-1, l+random.nextInt(Math.max(model.indexSize / 10, 2))); + + for (OpenBitSet set : sets) { + set.clear(l,u+1); + } + } + + return frangeStr(!positive, l, u, cache, cost, exclude); + } else { + // term or boolean query + OpenBitSet pset = new OpenBitSet(model.indexSize); + for (int i=0; i= model.indexSize) break; + sb.append((positive ? " ":" -") + f+":"+doc); + } + + String ret = sb.toString(); + if (ret.length()==0) ret = (positive ? "":"-") + "id:99999999"; + + if (!cache || exclude || random.nextBoolean()) { + ret = "{!cache=" + cache + + ((cost != 0) ? " cost="+cost : "") + + ((exclude) ? " tag=t" : "") + + "}" + ret; + } + + return ret; + } + } + + @Test + public void testRandomFiltering() throws Exception { + int indexIter=5 * RANDOM_MULTIPLIER; + int queryIter=250 * RANDOM_MULTIPLIER; + Model model = new Model(); + + for (int iiter = 0; iiter params = new ArrayList(); + params.add("q"); params.add(makeRandomQuery(model, true, false)); + + int nFilters = random.nextInt(5); + for (int i=0; i filters = new ArrayList(); filters.add(rangeQ); req.close(); parser = QParser.getParser("{!dismax qf=t10_100_ws pf=t10_100_ws ps=20}"+ t(0) + ' ' + t(1) + ' ' + t(2), null, req); - Query q= parser.parse(); + Query q= parser.getQuery(); // SolrIndexSearcher searcher = req.getSearcher(); // DocSet range = searcher.getDocSet(rangeQ, null);
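
Usage sketch (illustrative, not part of the committed patch). With this change, a filter can opt out of the
filterCache via cache=false and, when its cost is 100 or more and it implements PostFilter, it is run as a
post filter only over documents that already matched the main query and all cheaper filters. The request
below mirrors one of the TestFiltering cases added in this patch:

    q=*:*&fq={!frange l=2 u=3 cache=false cost=100}val_i

A custom post filter follows the same pattern as the FunctionRangeQuery added here: extend
ExtendedQueryBase, implement PostFilter, and return a DelegatingCollector that only forwards accepted
documents to its delegate. The class name AcceptListPostFilter and its accept() predicate are hypothetical
placeholders, shown only to outline the contract:

    import java.io.IOException;

    import org.apache.lucene.search.IndexSearcher;
    import org.apache.solr.search.DelegatingCollector;
    import org.apache.solr.search.ExtendedQueryBase;
    import org.apache.solr.search.PostFilter;

    public class AcceptListPostFilter extends ExtendedQueryBase implements PostFilter {

      public AcceptListPostFilter() {
        setCache(false); // post filters must not be cached
        setCost(200);    // cost >= 100 selects the PostFilter code path
      }

      @Override
      public DelegatingCollector getFilterCollector(IndexSearcher searcher) {
        return new DelegatingCollector() {
          @Override
          public void collect(int doc) throws IOException {
            // docBase is maintained by DelegatingCollector.setNextReader()
            if (accept(docBase + doc)) {
              super.collect(doc); // only accepted docs reach the sorting/grouping collectors
            }
          }
        };
      }

      // Hypothetical per-document predicate, e.g. a lookup against an external system.
      private boolean accept(int globalDocId) {
        return true;
      }
    }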