mirror of https://github.com/apache/lucene.git
SOLR-3195: timeAllowed is ignored for grouping queries
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1296996 13f79535-47bb-0310-9956-ffa450edef68
Parent: 1ded74e10d
Commit: 60d9caa11e
Changed paths:
  lucene/core/src/java/org/apache/lucene/search
  solr/CHANGES.txt
  solr/core/src/java/org/apache/solr/handler/component
  solr/core/src/java/org/apache/solr/search
  solr/core/src/test/org/apache/solr
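For context on the SOLR-3195 summary above: the bug is that a request which both enables result grouping and sets timeAllowed never had its time limit enforced, because the grouping code path bypassed TimeLimitingCollector. A rough sketch of such a request, built with classes that appear in the diffs below (the grouping field and the wrapper class here are hypothetical, purely illustrative):

import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;

// Illustrative only: a grouped query that also carries a time budget.
public class GroupedTimeAllowedRequest {
  public static ModifiableSolrParams build() {
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "*:*");
    params.set("group", "true");                  // switches the request onto the grouping code path
    params.set("group.field", "manu_exact");      // hypothetical grouping field
    params.set(CommonParams.TIME_ALLOWED, "500"); // budget in milliseconds; ignored by grouping before this fix
    return params;
  }
}

Before this commit the grouping classes called searcher.search(...) directly, so the parameter above was silently dropped; the diffs below route those calls through a TimeLimitingCollector instead.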
lucene/core/src/java/org/apache/lucene/search/TimeLimitingCollector.java

@@ -17,12 +17,12 @@ package org.apache.lucene.search;
  * limitations under the License.
  */
 
-import java.io.IOException;
-
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.util.Counter;
 import org.apache.lucene.util.ThreadInterruptedException;
 
+import java.io.IOException;
+
 /**
  * The {@link TimeLimitingCollector} is used to timeout search requests that
  * take longer than the maximum allowed search time limit. After this time is
@@ -60,7 +60,7 @@ public class TimeLimitingCollector extends Collector {
 
   private long t0 = Long.MIN_VALUE;
   private long timeout = Long.MIN_VALUE;
-  private final Collector collector;
+  private Collector collector;
   private final Counter clock;
   private final long ticksAllowed;
   private boolean greedy = false;
@@ -172,6 +172,17 @@ public class TimeLimitingCollector extends Collector {
   public boolean acceptsDocsOutOfOrder() {
     return collector.acceptsDocsOutOfOrder();
   }
 
+  /**
+   * This is so the same timer can be used with a multi-phase search process such as grouping.
+   * We don't want to create a new TimeLimitingCollector for each phase because that would
+   * reset the timer for each phase. Once time is up subsequent phases need to timeout quickly.
+   *
+   * @param collector The actual collector performing search functionality
+   */
+  public void setCollector(Collector collector) {
+    this.collector = collector;
+  }
+
   /**
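The point of the new setCollector method is that one TimeLimitingCollector, and therefore one running time budget, can be threaded through the phases of a multi-phase search instead of restarting the clock per phase. A minimal sketch of that usage pattern, assuming a caller that already has a searcher, a query, and one collector per phase (all names below are placeholders, not code from this commit):

import java.io.IOException;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TimeLimitingCollector;

public class TwoPhaseTimeLimitExample {
  // Runs two collection phases against one shared time budget.
  public static void run(IndexSearcher searcher, Query query,
                         Collector firstPhase, Collector secondPhase,
                         long timeAllowedMillis) throws IOException {
    TimeLimitingCollector timeLimiting = new TimeLimitingCollector(
        firstPhase, TimeLimitingCollector.getGlobalCounter(), timeAllowedMillis);
    try {
      searcher.search(query, timeLimiting);      // phase one
      timeLimiting.setCollector(secondPhase);    // reuse the same timer
      searcher.search(query, timeLimiting);      // phase two shares the remaining budget
    } catch (TimeLimitingCollector.TimeExceededException e) {
      // the budget ran out in whichever phase was active; treat results as partial
    }
  }
}

Making the collector field non-final (the @@ -60,7 +60,7 @@ hunk above) is what allows the delegate to be swapped while t0 and timeout keep their original values; Grouping.searchWithTimeLimiter below relies on exactly this.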
solr/CHANGES.txt

@@ -632,6 +632,9 @@ Bug Fixes
 * SOLR-3168: ReplicationHandler "numberToKeep" & "maxNumberOfBackups" parameters
   would keep only 1 backup, even if more than 1 was specified (Neil Hooey, James Dyer)
 
+* SOLR-3195: timeAllowed is ignored for grouping queries
+  (Russell Black via Martijn van Groningen)
+
 Other Changes
 ----------------------
 * SOLR-2922: Upgrade commons-io and commons-lang to 2.1 and 2.6, respectively. (koji)
ResponseBuilder.java

@@ -172,6 +172,8 @@ public class ResponseBuilder
   public final Map<String, TopGroups<BytesRef>> mergedTopGroups = new HashMap<String, TopGroups<BytesRef>>();
   public final Map<String, QueryCommandResult> mergedQueryCommandResults = new HashMap<String, QueryCommandResult>();
   public final Map<Object, SolrDocument> retrievedDocuments = new HashMap<Object, SolrDocument>();
+  // Used for timeAllowed parameter. First phase elapsed time is subtracted from the time allowed for the second phase.
+  public int firstPhaseElapsedTime;
 
   /**
    * Utility function to add debugging info. This will make sure a valid
Grouping.java

@@ -90,6 +90,7 @@ public class Grouping {
   private int maxMatches; // max number of matches from any grouping command
   private float maxScore = Float.NEGATIVE_INFINITY; // max score seen in any doclist
   private boolean signalCacheWarning = false;
+  private TimeLimitingCollector timeLimitingCollector;
 
 
   public DocList mainResult; // output if one of the grouping commands should be used as the main result.
@@ -348,7 +349,7 @@ public class Grouping {
     }
 
     if (allCollectors != null) {
-      searcher.search(query, luceneFilter, allCollectors);
+      searchWithTimeLimiter(luceneFilter, allCollectors);
     }
 
     if (getGroupedDocSet && allGroupHeadsCollector != null) {
@@ -377,14 +378,14 @@ public class Grouping {
           signalCacheWarning = true;
           logger.warn(String.format("The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache));
           logger.warn("Please increase cache size or disable group caching.");
-          searcher.search(query, luceneFilter, secondPhaseCollectors);
+          searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
         }
       } else {
         if (pf.postFilter != null) {
           pf.postFilter.setLastDelegate(secondPhaseCollectors);
           secondPhaseCollectors = pf.postFilter;
         }
-        searcher.search(query, luceneFilter, secondPhaseCollectors);
+        searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
       }
     }
   }
@@ -406,6 +407,33 @@ public class Grouping {
     }
   }
 
+  /**
+   * Invokes search with the specified filter and collector.
+   * If a time limit has been specified, wrap the collector in a TimeLimitingCollector
+   */
+  private void searchWithTimeLimiter(final Filter luceneFilter, Collector collector) throws IOException {
+    if (cmd.getTimeAllowed() > 0) {
+      if (timeLimitingCollector == null) {
+        timeLimitingCollector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), cmd.getTimeAllowed());
+      } else {
+        /*
+         * This is so the same timer can be used for grouping's multiple phases.
+         * We don't want to create a new TimeLimitingCollector for each phase because that would
+         * reset the timer for each phase. If time runs out during the first phase, the
+         * second phase should timeout quickly.
+         */
+        timeLimitingCollector.setCollector(collector);
+      }
+      collector = timeLimitingCollector;
+    }
+    try {
+      searcher.search(query, luceneFilter, collector);
+    } catch (TimeLimitingCollector.TimeExceededException x) {
+      logger.warn("Query: " + query + "; " + x.getMessage());
+      qr.setPartialResults(true);
+    }
+  }
+
   /**
    * Returns offset + len if len equals zero or higher. Otherwise returns max.
    *
@@ -982,4 +1010,4 @@ public class Grouping {
 
   }
 
 }
CommandHandler.java

@@ -21,12 +21,15 @@ import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TimeLimitingCollector;
 import org.apache.lucene.search.grouping.AbstractAllGroupHeadsCollector;
 import org.apache.lucene.search.grouping.term.TermAllGroupHeadsCollector;
 import org.apache.lucene.util.OpenBitSet;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.search.*;
 import org.apache.solr.search.grouping.distributed.shardresultserializer.ShardResultTransformer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -91,11 +94,14 @@ public class CommandHandler {
 
   }
 
+  private final static Logger logger = LoggerFactory.getLogger(CommandHandler.class);
+
   private final SolrIndexSearcher.QueryCommand queryCommand;
   private final List<Command> commands;
   private final SolrIndexSearcher searcher;
   private final boolean needDocset;
   private final boolean truncateGroups;
+  private boolean partialResults = false;
 
   private DocSet docSet;
 
@@ -129,7 +135,7 @@ public class CommandHandler {
     } else if (needDocset) {
       docSet = computeDocSet(query, luceneFilter, collectors);
     } else {
-      searcher.search(query, luceneFilter, MultiCollector.wrap(collectors.toArray(new Collector[nrOfCommands])));
+      searchWithTimeLimiter(query, luceneFilter, MultiCollector.wrap(collectors.toArray(new Collector[nrOfCommands])));
     }
   }
 
@@ -138,10 +144,10 @@ public class CommandHandler {
     AbstractAllGroupHeadsCollector termAllGroupHeadsCollector =
         TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
     if (collectors.isEmpty()) {
-      searcher.search(query, luceneFilter, termAllGroupHeadsCollector);
+      searchWithTimeLimiter(query, luceneFilter, termAllGroupHeadsCollector);
     } else {
       collectors.add(termAllGroupHeadsCollector);
-      searcher.search(query, luceneFilter, MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()])));
+      searchWithTimeLimiter(query, luceneFilter, MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()])));
     }
 
     int maxDoc = searcher.maxDoc();
@@ -158,7 +164,7 @@ public class CommandHandler {
       Collector wrappedCollectors = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
       docSetCollector = new DocSetDelegateCollector(maxDoc >> 6, maxDoc, wrappedCollectors);
     }
-    searcher.search(query, luceneFilter, docSetCollector);
+    searchWithTimeLimiter(query, luceneFilter, docSetCollector);
     return docSetCollector.getDocSet();
   }
 
@@ -167,7 +173,24 @@ public class CommandHandler {
     if (docSet != null) {
      queryResult.setDocSet(docSet);
     }
+    queryResult.setPartialResults(partialResults);
     return transformer.transform(commands);
   }
 
+  /**
+   * Invokes search with the specified filter and collector.
+   * If a time limit has been specified then wrap the collector in the TimeLimitingCollector
+   */
+  private void searchWithTimeLimiter(final Query query, final Filter luceneFilter, Collector collector) throws IOException {
+    if (queryCommand.getTimeAllowed() > 0) {
+      collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), queryCommand.getTimeAllowed());
+    }
+    try {
+      searcher.search(query, luceneFilter, collector);
+    } catch (TimeLimitingCollector.TimeExceededException x) {
+      partialResults = true;
+      logger.warn("Query: " + query + "; " + x.getMessage());
+    }
+  }
+
 }
TopGroupsShardRequestFactory.java

@@ -130,6 +130,11 @@ public class TopGroupsShardRequestFactory implements ShardRequestFactory {
     } else {
       sreq.params.set(CommonParams.FL, rb.req.getSchema().getUniqueKeyField().getName());
     }
 
+    int origTimeAllowed = sreq.params.getInt(CommonParams.TIME_ALLOWED, -1);
+    if (origTimeAllowed > 0) {
+      sreq.params.set(CommonParams.TIME_ALLOWED, Math.max(1, origTimeAllowed - rb.firstPhaseElapsedTime));
+    }
+
     return new ShardRequest[] {sreq};
   }
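To make the second-phase budget concrete (numbers are illustrative): if the client sent timeAllowed=1000 and the slowest shard reported 400 ms elapsed for the first phase, rb.firstPhaseElapsedTime is 400 and the second-phase shard requests go out with timeAllowed = max(1, 1000 - 400) = 600. The max(1, ...) floor keeps the parameter positive, so the time limit stays enabled even when the first phase has already consumed the whole budget. rb.firstPhaseElapsedTime itself is populated in SearchGroupShardResponseProcessor below as the maximum elapsed time reported by any shard.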
SearchGroupShardResponseProcessor.java

@@ -57,7 +57,9 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
 
     SearchGroupsResultTransformer serializer = new SearchGroupsResultTransformer(rb.req.getSearcher());
     try {
+      int maxElapsedTime = 0;
       for (ShardResponse srsp : shardRequest.responses) {
+        maxElapsedTime = (int) Math.max(maxElapsedTime, srsp.getSolrResponse().getElapsedTime());
         @SuppressWarnings("unchecked")
         NamedList<NamedList> firstPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("firstPhase");
         Map<String, Collection<SearchGroup<BytesRef>>> result = serializer.transformToNative(firstPhaseResult, groupSort, null, srsp.getShard());
@@ -79,6 +81,7 @@ public class SearchGroupShardResponseProcessor implements ShardResponseProcessor
         }
       }
     }
+    rb.firstPhaseElapsedTime = maxElapsedTime;
     for (String groupField : commandSearchGroups.keySet()) {
       List<Collection<SearchGroup<BytesRef>>> topGroups = commandSearchGroups.get(groupField);
       Collection<SearchGroup<BytesRef>> mergedTopGroups = SearchGroup.merge(topGroups, ss.getOffset(), ss.getCount(), groupSort);
TestDistributedGrouping.java

@@ -18,6 +18,7 @@ package org.apache.solr;
  */
 
 import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
 /**
@@ -169,6 +170,9 @@ public class TestDistributedGrouping extends BaseDistributedSearchTestCase {
     simpleQuery("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " desc", "group.sort", "score desc"); // SOLR-2955
     simpleQuery("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", "score desc, _docid_ asc, id asc");
     simpleQuery("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10);
+
+    // Can't validate the response, but can check if no errors occur.
+    simpleQuery("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc", CommonParams.TIME_ALLOWED, 1);
   }
 
   private void simpleQuery(Object... queryParams) throws SolrServerException {
TestGroupingSearch.java

@@ -214,6 +214,19 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
     );
   }
 
+  @Test
+  public void testGroupingWithTimeAllowed() throws Exception {
+    assertU(add(doc("id", "1")));
+    assertU(add(doc("id", "2")));
+    assertU(add(doc("id", "3")));
+    assertU(add(doc("id", "4")));
+    assertU(add(doc("id", "5")));
+    assertU(commit());
+
+    // Just checking if no errors occur
+    assertJQ(req("q", "*:*", "group", "true", "group.query", "id:1", "group.query", "id:2", "timeAllowed", "1"));
+  }
+
   @Test
   public void testGroupingSortByFunction() throws Exception {
     assertU(add(doc("id", "1", "value1_i", "1", "value2_i", "1", "store", "45.18014,-93.87742")));