Added notion of Rewrite that replaces ScopePhase
parent d4ef4697d5
commit 371b071fb7
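In short: the ScopePhase abstraction (with its CollectorPhase and TopDocsPhase variants and the string-based searcher scopes) is replaced by a single SearchContext.Rewrite callback. Parent/child queries and filters now register themselves via SearchContext.addRewrite(...), and QueryPhase runs every registered rewrite exactly once, in a dedicated REWRITE searcher stage, before the main query executes. Facets lose their scope strings as well: collectors are split into plain query-level and global lists. A minimal sketch of the new contract, distilled from the hunks below (ExampleRewrite is hypothetical, not part of the commit):

    // A Rewrite executes its inner query up front and caches the result
    // for the main query to consume.
    class ExampleRewrite implements SearchContext.Rewrite {

        @Override
        public void contextRewrite(SearchContext searchContext) throws Exception {
            // called once per request, in the REWRITE stage, before the main query;
            // the real implementations refresh the id cache and run their inner query here
        }

        @Override
        public void contextClear() {
            // called when the search context is released; recycled structures are returned here
        }
    }

    // Registration happens at parse time, e.g. in HasChildFilterParser below:
    // searchContext.addRewrite(childFilter);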
@@ -126,7 +126,7 @@ public class HasChildFilterParser implements FilterParser {
         SearchContext searchContext = SearchContext.current();

         HasChildFilter childFilter = HasChildFilter.create(query, null, parentType, childType, searchContext, executionType);
-        searchContext.addScopePhase(childFilter);
+        searchContext.addRewrite(childFilter);

         if (filterName != null) {
             parseContext.addNamedFilter(filterName, childFilter);
@@ -125,11 +125,11 @@ public class HasChildQueryParser implements QueryParser {
         if (scoreType != null) {
             Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
             ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, null, innerQuery, scoreType);
-            searchContext.addScopePhase(childrenQuery);
+            searchContext.addRewrite(childrenQuery);
             query = childrenQuery;
         } else {
             HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, null, parentType, childType, searchContext, executionType);
-            searchContext.addScopePhase(hasChildFilter);
+            searchContext.addRewrite(hasChildFilter);
             query = new ConstantScoreQuery(hasChildFilter);
         }
         query.setBoost(boost);
@@ -122,7 +122,7 @@ public class HasParentFilterParser implements FilterParser {
         SearchContext searchContext = SearchContext.current();

         HasParentFilter parentFilter = HasParentFilter.create(executionType, query, null, parentType, searchContext);
-        searchContext.addScopePhase(parentFilter);
+        searchContext.addRewrite(parentFilter);

         if (filterName != null) {
             parseContext.addNamedFilter(filterName, parentFilter);
@@ -150,11 +150,11 @@ public class HasParentQueryParser implements QueryParser {
         Query query;
         if (score) {
             ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter, null);
-            searchContext.addScopePhase(parentQuery);
+            searchContext.addRewrite(parentQuery);
             query = parentQuery;
         } else {
             HasParentFilter hasParentFilter = HasParentFilter.create(executionType, innerQuery, null, parentType, searchContext);
-            searchContext.addScopePhase(hasParentFilter);
+            searchContext.addRewrite(hasParentFilter);
             query = new ConstantScoreQuery(hasParentFilter);
         }
         query.setBoost(boost);
@@ -122,7 +122,7 @@ public class TopChildrenQueryParser implements QueryParser {

         SearchContext searchContext = SearchContext.current();
         TopChildrenQuery childQuery = new TopChildrenQuery(query, null, childType, parentType, scoreType, factor, incrementalFactor);
-        searchContext.addScopePhase(childQuery);
+        searchContext.addRewrite(childQuery);
         return childQuery;
     }
 }
@@ -34,7 +34,6 @@ import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
-import org.elasticsearch.search.internal.ScopePhase;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -45,7 +44,7 @@ import java.util.Set;
  * connects the matching child docs to the related parent documents
  * using the {@link IdReaderTypeCache}.
  */
-public class ChildrenQuery extends Query implements ScopePhase.CollectorPhase {
+public class ChildrenQuery extends Query implements SearchContext.Rewrite {

     private final SearchContext searchContext;
     private final String parentType;
@@ -96,9 +95,9 @@ public class ChildrenQuery extends Query implements ScopePhase.CollectorPhase {
             return this;
         }

-        int index = searchContext.scopePhases().indexOf(this);
+        int index = searchContext.rewrites().indexOf(this);
         ChildrenQuery rewrite = new ChildrenQuery(this, rewrittenChildQuery);
-        searchContext.scopePhases().set(index, rewrite);
+        searchContext.rewrites().set(index, rewrite);
         return rewrite;
     }

@@ -108,34 +107,24 @@ public class ChildrenQuery extends Query implements ScopePhase.CollectorPhase {
     }

     @Override
-    public boolean requiresProcessing() {
-        return uidToScore == null;
-    }
-
-    @Override
-    public Collector collector() {
+    public void contextRewrite(SearchContext searchContext) throws Exception {
+        searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+
         uidToScore = CacheRecycler.popObjectFloatMap();
+        Collector collector;
         switch (scoreType) {
             case AVG:
                 uidToCount = CacheRecycler.popObjectIntMap();
-                return new AvgChildUidCollector(scoreType, searchContext, parentType, uidToScore, uidToCount);
+                collector = new AvgChildUidCollector(scoreType, searchContext, parentType, uidToScore, uidToCount);
+                break;
             default:
-                return new ChildUidCollector(scoreType, searchContext, parentType, uidToScore);
+                collector = new ChildUidCollector(scoreType, searchContext, parentType, uidToScore);
         }
+        searchContext.searcher().search(childQuery, collector);
     }

-    @Override
-    public void processCollector(Collector collector) {
-        // Do nothing, we already have the references to the child scores and optionally the child count.
-    }
-
-    @Override
-    public String scope() {
-        return scope;
-    }
-
     @Override
-    public void clear() {
+    public void contextClear() {
         if (uidToScore != null) {
             CacheRecycler.pushObjectFloatMap(uidToScore);
         }
@@ -146,11 +135,6 @@ public class ChildrenQuery extends Query implements ScopePhase.CollectorPhase {
         uidToCount = null;
     }

-    @Override
-    public Query query() {
-        return childQuery;
-    }
-
     @Override
     public Weight createWeight(IndexSearcher searcher) throws IOException {
         if (uidToScore == null) {
@@ -22,7 +22,6 @@ package org.elasticsearch.index.search.child;
 import gnu.trove.set.hash.THashSet;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
@@ -35,7 +34,6 @@ import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
-import org.elasticsearch.search.internal.ScopePhase;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -44,7 +42,7 @@ import java.util.Map;
 /**
  *
  */
-public abstract class HasChildFilter extends Filter implements ScopePhase.CollectorPhase {
+public abstract class HasChildFilter extends Filter implements SearchContext.Rewrite {

     final Query childQuery;
     final String scope;
@@ -93,22 +91,6 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.CollectorPhase {
             super(childQuery, scope, parentType, childType, searchContext);
         }

-        public boolean requiresProcessing() {
-            return parentDocs == null;
-        }
-
-        public Collector collector() {
-            return new ChildCollector(parentType, searchContext);
-        }
-
-        public void processCollector(Collector collector) {
-            this.parentDocs = ((ChildCollector) collector).parentDocs();
-        }
-
-        public void clear() {
-            parentDocs = null;
-        }
-
         public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
             if (parentDocs == null) {
                 throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
@@ -120,6 +102,18 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.CollectorPhase {
             return parentDocs.get(context.reader().getCoreCacheKey());
         }

+        @Override
+        public void contextRewrite(SearchContext searchContext) throws Exception {
+            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+            ChildCollector collector = new ChildCollector(parentType, searchContext);
+            searchContext.searcher().search(childQuery, collector);
+            this.parentDocs = collector.parentDocs();
+        }
+
+        @Override
+        public void contextClear() {
+            parentDocs = null;
+        }
     }

     static class Uid extends HasChildFilter {
@@ -130,19 +124,6 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.CollectorPhase {
             super(childQuery, scope, parentType, childType, searchContext);
         }

-        public boolean requiresProcessing() {
-            return collectedUids == null;
-        }
-
-        public Collector collector() {
-            collectedUids = CacheRecycler.popHashSet();
-            return new UidCollector(parentType, searchContext, collectedUids);
-        }
-
-        public void processCollector(Collector collector) {
-            collectedUids = ((UidCollector) collector).collectedUids;
-        }
-
         public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
             if (collectedUids == null) {
                 throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
@@ -156,7 +137,16 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.CollectorPhase {
             }
         }

-        public void clear() {
+        @Override
+        public void contextRewrite(SearchContext searchContext) throws Exception {
+            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+            collectedUids = CacheRecycler.popHashSet();
+            UidCollector collector = new UidCollector(parentType, searchContext, collectedUids);
+            searchContext.searcher().search(childQuery, collector);
+        }
+
+        @Override
+        public void contextClear() {
             if (collectedUids != null) {
                 CacheRecycler.pushHashSet(collectedUids);
             }
@@ -23,7 +23,6 @@ import gnu.trove.set.hash.THashSet;
 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
@@ -37,7 +36,6 @@ import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
-import org.elasticsearch.search.internal.ScopePhase;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -48,7 +46,7 @@ import static com.google.common.collect.Maps.newHashMap;
 /**
  * A filter that only return child documents that are linked to the parent documents that matched with the inner query.
  */
-public abstract class HasParentFilter extends Filter implements ScopePhase.CollectorPhase {
+public abstract class HasParentFilter extends Filter implements SearchContext.Rewrite {

     final Query parentQuery;
     final String scope;
@@ -95,19 +93,6 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.CollectorPhase {
             super(query, scope, parentType, context);
         }

-        public boolean requiresProcessing() {
-            return parents == null;
-        }
-
-        public Collector collector() {
-            parents = CacheRecycler.popHashSet();
-            return new ParentUidsCollector(parents, context, parentType);
-        }
-
-        public void processCollector(Collector collector) {
-            parents = ((ParentUidsCollector) collector).collectedUids;
-        }
-
         public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
             if (parents == null) {
                 throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
@@ -121,7 +106,17 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.CollectorPhase {
             }
         }

-        public void clear() {
+        @Override
+        public void contextRewrite(SearchContext searchContext) throws Exception {
+            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+            parents = CacheRecycler.popHashSet();
+            ParentUidsCollector collector = new ParentUidsCollector(parents, context, parentType);
+            searchContext.searcher().search(parentQuery, collector);
+            parents = collector.collectedUids;
+        }
+
+        @Override
+        public void contextClear() {
             if (parents != null) {
                 CacheRecycler.pushHashSet(parents);
             }
@@ -185,18 +180,6 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.CollectorPhase {
             super(query, scope, parentType, context);
         }

-        public boolean requiresProcessing() {
-            return parentDocs == null;
-        }
-
-        public Collector collector() {
-            return new ParentDocsCollector();
-        }
-
-        public void processCollector(Collector collector) {
-            parentDocs = ((ParentDocsCollector) collector).segmentResults;
-        }
-
         public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
             if (parentDocs == null) {
                 throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
@@ -210,7 +193,16 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.CollectorPhase {
             }
         }

-        public void clear() {
+        @Override
+        public void contextRewrite(SearchContext searchContext) throws Exception {
+            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+            ParentDocsCollector collector = new ParentDocsCollector();
+            searchContext.searcher().search(parentQuery, collector);
+            parentDocs = collector.segmentResults;
+        }
+
+        @Override
+        public void contextClear() {
             parentDocs = null;
         }

@@ -32,7 +32,6 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
-import org.elasticsearch.search.internal.ScopePhase;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -44,7 +43,7 @@ import java.util.Set;
  * connects the matching parent docs to the related child documents
  * using the {@link IdReaderTypeCache}.
  */
-public class ParentQuery extends Query implements ScopePhase.CollectorPhase {
+public class ParentQuery extends Query implements SearchContext.Rewrite {

     private final SearchContext searchContext;
     private final Query parentQuery;
@@ -76,39 +75,21 @@ public class ParentQuery extends Query implements ScopePhase.CollectorPhase {
     }

     @Override
-    public boolean requiresProcessing() {
-        return uidToScore == null;
-    }
-
-    @Override
-    public Collector collector() {
+    public void contextRewrite(SearchContext searchContext) throws Exception {
+        searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
         uidToScore = CacheRecycler.popObjectFloatMap();
-        return new ParentUidCollector(uidToScore, searchContext, parentType);
+        ParentUidCollector collector = new ParentUidCollector(uidToScore, searchContext, parentType);
+        searchContext.searcher().search(parentQuery, collector);
     }

-    @Override
-    public void processCollector(Collector collector) {
-        // Do nothing, we already have the references to the parent scores.
-    }
-
-    @Override
-    public String scope() {
-        return scope;
-    }
-
     @Override
-    public void clear() {
+    public void contextClear() {
         if (uidToScore != null) {
             CacheRecycler.pushObjectFloatMap(uidToScore);
         }
         uidToScore = null;
     }

-    @Override
-    public Query query() {
-        return parentQuery;
-    }
-
     @Override
     public String toString(String field) {
         StringBuilder sb = new StringBuilder();
@@ -125,8 +106,8 @@ public class ParentQuery extends Query implements ScopePhase.CollectorPhase {
             return this;
         }
         ParentQuery rewrite = new ParentQuery(this, rewrittenChildQuery);
-        int index = searchContext.scopePhases().indexOf(this);
-        searchContext.scopePhases().set(index, rewrite);
+        int index = searchContext.rewrites().indexOf(this);
+        searchContext.rewrites().set(index, rewrite);
         return rewrite;
     }

@@ -27,7 +27,6 @@ import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.lucene.search.EmptyScorer;
-import org.elasticsearch.search.internal.ScopePhase;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
@@ -36,7 +35,7 @@ import java.util.*;
 /**
  *
  */
-public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
+public class TopChildrenQuery extends Query implements SearchContext.Rewrite {

     private Query query;

@@ -73,38 +72,52 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
     }

     @Override
-    public Query query() {
-        return this;
+    public void contextRewrite(SearchContext searchContext) throws Exception {
+        searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
+
+        int numDocs = (searchContext.from() + searchContext.size());
+        if (numDocs == 0) {
+            numDocs = 1;
+        }
+        numDocs *= factor;
+        while (true) {
+            clear();
+//            if (topDocsPhase.scope() != null) {
+//                searchContext.searcher().processingScope(topDocsPhase.scope());
+//            }
+            TopDocs topDocs = searchContext.searcher().search(query, numDocs);
+//            if (topDocsPhase.scope() != null) {
+                // we mark the scope as processed, so we don't process it again, even if we need to rerun the query...
+//                searchContext.searcher().processedScope();
+//            }
+            processResults(topDocs, searchContext);
+
+            // check if we found enough docs, if so, break
+            if (numHits >= (searchContext.from() + searchContext.size())) {
+                break;
+            }
+            // if we did not find enough docs, check if it make sense to search further
+            if (topDocs.totalHits <= numDocs) {
+                break;
+            }
+            // if not, update numDocs, and search again
+            numDocs *= incrementalFactor;
+            if (numDocs > topDocs.totalHits) {
+                numDocs = topDocs.totalHits;
+            }
+        }
     }

     @Override
-    public String scope() {
-        return scope;
+    public void contextClear() {
     }

-    @Override
-    public void clear() {
+    void clear() {
         properlyInvoked[0] = true;
         parentDocs = null;
         numHits = 0;
     }

-    @Override
-    public int numHits() {
-        return numHits;
-    }
-
-    @Override
-    public int factor() {
-        return this.factor;
-    }
-
-    @Override
-    public int incrementalFactor() {
-        return this.incrementalFactor;
-    }
-
-    @Override
     public void processResults(TopDocs topDocs, SearchContext context) {
         Map<Object, TIntObjectHashMap<ParentDoc>> parentDocsPerReader = new HashMap<Object, TIntObjectHashMap<ParentDoc>>();
         for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
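Aside: the retry loop in contextRewrite above is the TopDocs loop that previously lived in QueryPhase (compare the QueryPhase hunk at the end of this diff); it moves into TopChildrenQuery itself, and the now-obsolete scope bookkeeping survives only as commented-out lines.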
@@ -19,7 +19,6 @@

 package org.elasticsearch.search.facet;

-import com.google.common.collect.Lists;
 import org.apache.lucene.search.Filter;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -29,9 +28,9 @@ import org.elasticsearch.index.search.nested.NestedChildrenCollector;
 import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchParseException;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;

+import java.util.ArrayList;
 import java.util.List;

 /**
@@ -64,7 +63,8 @@ public class FacetParseElement implements SearchParseElement {
     public void parse(XContentParser parser, SearchContext context) throws Exception {
         XContentParser.Token token;

-        List<FacetCollector> facetCollectors = null;
+        List<FacetCollector> queryCollectors = null;
+        List<FacetCollector> globalCollectors = null;

         String topLevelFieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -72,7 +72,7 @@ public class FacetParseElement implements SearchParseElement {
                 topLevelFieldName = parser.currentName();
             } else if (token == XContentParser.Token.START_OBJECT) {
                 FacetCollector facet = null;
-                String scope = ContextIndexSearcher.Scopes.MAIN;
+                boolean global = false;
                 String facetFieldName = null;
                 Filter filter = null;
                 boolean cacheFilter = true;
@@ -92,9 +92,7 @@ public class FacetParseElement implements SearchParseElement {
                     }
                 } else if (token.isValue()) {
                     if ("global".equals(facetFieldName)) {
-                        if (parser.booleanValue()) {
-                            scope = ContextIndexSearcher.Scopes.GLOBAL;
-                        }
+                        global = parser.booleanValue();
                     } else if ("scope".equals(facetFieldName) || "_scope".equals(facetFieldName)) {
                         throw new SearchParseException(context, "the [scope] support in facets have been removed");
                     } else if ("cache_filter".equals(facetFieldName) || "cacheFilter".equals(facetFieldName)) {
@@ -131,14 +129,21 @@ public class FacetParseElement implements SearchParseElement {
                     throw new SearchParseException(context, "no facet type found for facet named [" + topLevelFieldName + "]");
                 }

-                if (facetCollectors == null) {
-                    facetCollectors = Lists.newArrayList();
+                if (global) {
+                    if (globalCollectors == null) {
+                        globalCollectors = new ArrayList<FacetCollector>();
+                    }
+                    globalCollectors.add(facet);
+                } else {
+                    if (queryCollectors == null) {
+                        queryCollectors = new ArrayList<FacetCollector>();
+                    }
+                    queryCollectors.add(facet);
                 }
-                facetCollectors.add(facet);
-                context.searcher().addCollector(scope, facet);
+
             }
         }

-        context.facets(new SearchContextFacets(facetCollectors));
+        context.facets(new SearchContextFacets(queryCollectors, globalCollectors));
     }
 }
@@ -33,7 +33,6 @@ import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
 import org.elasticsearch.common.lucene.search.XFilteredQuery;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchPhase;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.query.QueryPhaseExecutionException;

@@ -64,11 +63,16 @@ public class FacetPhase implements SearchPhase {

     @Override
     public void preProcess(SearchContext context) {
+        if (context.facets() != null && context.facets().queryCollectors() != null) {
+            for (FacetCollector collector : context.facets().queryCollectors()) {
+                context.searcher().addMainQueryCollector(collector);
+            }
+        }
     }

     @Override
     public void execute(SearchContext context) throws ElasticSearchException {
-        if (context.facets() == null || context.facets().facetCollectors() == null) {
+        if (context.facets() == null) {
             return;
         }
         if (context.queryResult().facets() != null) {
@@ -78,7 +82,7 @@ public class FacetPhase implements SearchPhase {

         // optimize global facet execution, based on filters (don't iterate over all docs), and check
         // if we have special facets that can be optimized for all execution, do it
-        List<Collector> collectors = context.searcher().removeCollectors(ContextIndexSearcher.Scopes.GLOBAL);
+        List<FacetCollector> collectors = context.facets().globalCollectors();

         if (collectors != null && !collectors.isEmpty()) {
             Map<Filter, List<Collector>> filtersByCollector = Maps.newHashMap();
@@ -126,8 +130,13 @@ public class FacetPhase implements SearchPhase {
         SearchContextFacets contextFacets = context.facets();

         List<Facet> facets = Lists.newArrayListWithCapacity(2);
-        if (contextFacets.facetCollectors() != null) {
-            for (FacetCollector facetCollector : contextFacets.facetCollectors()) {
+        if (contextFacets.queryCollectors() != null) {
+            for (FacetCollector facetCollector : contextFacets.queryCollectors()) {
+                facets.add(facetCollector.facet());
+            }
+        }
+        if (contextFacets.globalCollectors() != null) {
+            for (FacetCollector facetCollector : contextFacets.globalCollectors()) {
                 facets.add(facetCollector.facet());
             }
         }
@@ -26,13 +26,19 @@ import java.util.List;
 */
 public class SearchContextFacets {

-    private final List<FacetCollector> facetCollectors;
+    private final List<FacetCollector> queryCollectors;
+    private final List<FacetCollector> globalCollectors;

-    public SearchContextFacets(List<FacetCollector> facetCollectors) {
-        this.facetCollectors = facetCollectors;
+    public SearchContextFacets(List<FacetCollector> queryCollectors, List<FacetCollector> globalCollectors) {
+        this.queryCollectors = queryCollectors;
+        this.globalCollectors = globalCollectors;
     }

-    public List<FacetCollector> facetCollectors() {
-        return facetCollectors;
+    public List<FacetCollector> queryCollectors() {
+        return queryCollectors;
     }
+
+    public List<FacetCollector> globalCollectors() {
+        return globalCollectors;
+    }
 }
@@ -20,10 +20,7 @@
 package org.elasticsearch.search.internal;

-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.*;
 import org.elasticsearch.common.lucene.MinimumScoreCollector;
 import org.elasticsearch.common.lucene.MultiCollector;
@@ -34,34 +31,31 @@ import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.search.dfs.CachedDfSource;

 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;

 /**
  *
  */
 public class ContextIndexSearcher extends IndexSearcher {

-    public static final class Scopes {
-        public static final String MAIN = "_main_";
-        public static final String GLOBAL = "_global_";
-        public static final String NA = "_na_";
+    public static enum Stage {
+        NA,
+        MAIN_QUERY,
+        REWRITE
     }

     private final SearchContext searchContext;

     private final IndexReader reader;

     private CachedDfSource dfSource;

-    private Map<String, List<Collector>> scopeCollectors;
+    private List<Collector> queryCollectors;

-    private String processingScope;
+    private Stage currentState = Stage.NA;

     public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
         super(searcher.reader());
         this.searchContext = searchContext;
         this.reader = searcher.reader();
         setSimilarity(searcher.searcher().getSimilarity());
     }

@@ -69,46 +63,23 @@ public class ContextIndexSearcher extends IndexSearcher {
         this.dfSource = dfSource;
     }

-    public void addCollector(String scope, Collector collector) {
-        if (scopeCollectors == null) {
-            scopeCollectors = Maps.newHashMap();
+    /**
+     * Adds a query level collector that runs at {@link Stage#MAIN_QUERY}
+     */
+    public void addMainQueryCollector(Collector collector) {
+        if (queryCollectors == null) {
+            queryCollectors = new ArrayList<Collector>();
         }
-        List<Collector> collectors = scopeCollectors.get(scope);
-        if (collectors == null) {
-            collectors = Lists.newArrayList();
-            scopeCollectors.put(scope, collectors);
-        }
-        collectors.add(collector);
+        queryCollectors.add(collector);
     }

-    public List<Collector> removeCollectors(String scope) {
-        if (scopeCollectors == null) {
-            return null;
-        }
-        return scopeCollectors.remove(scope);
+    public void inStage(Stage stage) {
+        this.currentState = stage;
     }

-    public boolean hasCollectors(String scope) {
-        if (scopeCollectors == null) {
-            return false;
-        }
-        if (!scopeCollectors.containsKey(scope)) {
-            return false;
-        }
-        return !scopeCollectors.get(scope).isEmpty();
-    }
-
-    public void processingScope(String scope) {
-        this.processingScope = scope;
-    }
-
-    public void processedScope() {
-        // clean the current scope (we processed it, also handles scrolling since we don't want to
-        // do it again)
-        if (scopeCollectors != null) {
-            scopeCollectors.remove(processingScope);
-        }
-        this.processingScope = Scopes.NA;
+    public void finishStage(Stage stage) {
+        assert currentState == stage;
+        this.currentState = Stage.NA;
     }

     @Override
@@ -166,7 +137,7 @@ public class ContextIndexSearcher extends IndexSearcher {

     @Override
     public void search(List<AtomicReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-        if (searchContext.parsedFilter() != null && Scopes.MAIN.equals(processingScope)) {
+        if (searchContext.parsedFilter() != null && currentState == Stage.MAIN_QUERY) {
             // this will only get applied to the actual search collector and not
             // to any scoped collectors, also, it will only be applied to the main collector
             // since that is where the filter should only work
@@ -176,10 +147,9 @@ public class ContextIndexSearcher extends IndexSearcher {
             // TODO: change to use our own counter that uses the scheduler in ThreadPool
             collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), searchContext.timeoutInMillis());
         }
-        if (scopeCollectors != null) {
-            List<Collector> collectors = scopeCollectors.get(processingScope);
-            if (collectors != null && !collectors.isEmpty()) {
-                collector = new MultiCollector(collector, collectors.toArray(new Collector[collectors.size()]));
+        if (currentState == Stage.MAIN_QUERY) {
+            if (queryCollectors != null && !queryCollectors.isEmpty()) {
+                collector = new MultiCollector(collector, queryCollectors.toArray(new Collector[queryCollectors.size()]));
             }
         }
         // apply the minimum score after multi collector so we filter facets as well
@@ -187,7 +157,6 @@ public class ContextIndexSearcher extends IndexSearcher {
             collector = new MinimumScoreCollector(collector, searchContext.minimumScore());
         }

-
         // we only compute the doc id set once since within a context, we execute the same query always...
         if (searchContext.timeoutInMillis() != -1) {
             try {
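The Stage enum replaces the old string scopes: rather than tagging collectors with scope names, callers bracket their work with inStage/finishStage and the searcher consults currentState. A sketch of the calling pattern, mirroring what QueryPhase does later in this diff (searcher stands for any ContextIndexSearcher):

    searcher.inStage(ContextIndexSearcher.Stage.MAIN_QUERY);
    try {
        // run the main query; the parsed filter and any registered
        // main-query collectors are applied only in this stage
    } finally {
        // asserts the stage matches, then resets currentState to Stage.NA
        searcher.finishStage(ContextIndexSearcher.Stage.MAIN_QUERY);
    }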
@@ -1,56 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.internal;
-
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TopDocs;
-
-/**
- *
- */
-public interface ScopePhase {
-
-    String scope();
-
-    void clear();
-
-    Query query();
-
-    public interface TopDocsPhase extends ScopePhase {
-
-        void processResults(TopDocs topDocs, SearchContext context);
-
-        int numHits();
-
-        int factor();
-
-        int incrementalFactor();
-    }
-
-    public interface CollectorPhase extends ScopePhase {
-
-        boolean requiresProcessing();
-
-        Collector collector();
-
-        void processCollector(Collector collector);
-    }
-}
@@ -85,6 +85,13 @@ public class SearchContext implements Releasable {
         return current.get();
     }

+    public static interface Rewrite {
+
+        void contextRewrite(SearchContext searchContext) throws Exception;
+
+        void contextClear();
+    }
+
     private final long id;

     private final ShardSearchRequest request;
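One subtlety, visible in the ChildrenQuery and ParentQuery hunks above: a Rewrite that is also a Lucene Query must keep the context's rewrite list in sync when Query.rewrite produces a new instance, otherwise the context would call contextRewrite on an object that never executes. Reconstructed from the ChildrenQuery hunk (the surrounding signature is Lucene's standard Query.rewrite):

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        Query rewrittenChildQuery = childQuery.rewrite(reader);
        if (rewrittenChildQuery == childQuery) {
            return this;
        }
        // replace ourselves in the context so it points at the instance that will run
        int index = searchContext.rewrites().indexOf(this);
        ChildrenQuery rewrite = new ChildrenQuery(this, rewrittenChildQuery);
        searchContext.rewrites().set(index, rewrite);
        return rewrite;
    }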
@@ -168,7 +175,7 @@ public class SearchContext implements Releasable {

     private volatile long lastAccessTime;

-    private List<ScopePhase> scopePhases = null;
+    private List<Rewrite> rewrites = null;

     public SearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
                          Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard, ScriptService scriptService) {
@@ -196,9 +203,9 @@ public class SearchContext implements Releasable {
             scanContext.clear();
         }
         // clear and scope phase we have
-        if (scopePhases != null) {
-            for (ScopePhase scopePhase : scopePhases) {
-                scopePhase.clear();
+        if (rewrites != null) {
+            for (Rewrite rewrite : rewrites) {
+                rewrite.contextClear();
             }
         }
         engineSearcher.release();
@@ -564,15 +571,15 @@ public class SearchContext implements Releasable {
         return fetchResult;
     }

-    public List<ScopePhase> scopePhases() {
-        return this.scopePhases;
+    public void addRewrite(Rewrite rewrite) {
+        if (this.rewrites == null) {
+            this.rewrites = new ArrayList<Rewrite>();
+        }
+        this.rewrites.add(rewrite);
     }

-    public void addScopePhase(ScopePhase scopePhase) {
-        if (this.scopePhases == null) {
-            this.scopePhases = new ArrayList<ScopePhase>();
-        }
-        this.scopePhases.add(scopePhase);
+    public List<Rewrite> rewrites() {
+        return this.rewrites;
     }

     public ScanContext scanContext() {
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.query;

 import com.google.common.collect.ImmutableMap;
-import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TotalHitCountCollector;
@@ -31,12 +30,12 @@ import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchPhase;
 import org.elasticsearch.search.facet.FacetPhase;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
-import org.elasticsearch.search.internal.ScopePhase;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.sort.SortParseElement;
 import org.elasticsearch.search.sort.TrackScoresParseElement;
 import org.elasticsearch.search.suggest.SuggestPhase;

+import java.util.List;
 import java.util.Map;

 /**
@@ -84,79 +83,22 @@ public class QueryPhase implements SearchPhase {

     public void execute(SearchContext searchContext) throws QueryPhaseExecutionException {
         searchContext.queryResult().searchTimedOut(false);
-        // set the filter on the searcher
-        if (searchContext.scopePhases() != null) {
-            // we have scoped queries, refresh the id cache
+
+        List<SearchContext.Rewrite> rewrites = searchContext.rewrites();
+        if (rewrites != null) {
             try {
-                searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
-            } catch (Exception e) {
-                throw new QueryPhaseExecutionException(searchContext, "Failed to refresh id cache for child queries", e);
-            }
-
-            // the first scope level is the most nested child
-            for (ScopePhase scopePhase : searchContext.scopePhases()) {
-                if (scopePhase instanceof ScopePhase.TopDocsPhase) {
-                    ScopePhase.TopDocsPhase topDocsPhase = (ScopePhase.TopDocsPhase) scopePhase;
-                    int numDocs = (searchContext.from() + searchContext.size());
-                    if (numDocs == 0) {
-                        numDocs = 1;
-                    }
-                    try {
-                        numDocs *= topDocsPhase.factor();
-                        while (true) {
-                            topDocsPhase.clear();
-                            if (topDocsPhase.scope() != null) {
-                                searchContext.searcher().processingScope(topDocsPhase.scope());
-                            }
-                            TopDocs topDocs = searchContext.searcher().search(topDocsPhase.query(), numDocs);
-                            if (topDocsPhase.scope() != null) {
-                                // we mark the scope as processed, so we don't process it again, even if we need to rerun the query...
-                                searchContext.searcher().processedScope();
-                            }
-                            topDocsPhase.processResults(topDocs, searchContext);
-
-                            // check if we found enough docs, if so, break
-                            if (topDocsPhase.numHits() >= (searchContext.from() + searchContext.size())) {
-                                break;
-                            }
-                            // if we did not find enough docs, check if it make sense to search further
-                            if (topDocs.totalHits <= numDocs) {
-                                break;
-                            }
-                            // if not, update numDocs, and search again
-                            numDocs *= topDocsPhase.incrementalFactor();
-                            if (numDocs > topDocs.totalHits) {
-                                numDocs = topDocs.totalHits;
-                            }
-                        }
-                    } catch (Exception e) {
-                        throw new QueryPhaseExecutionException(searchContext, "Failed to execute child query [" + scopePhase.query() + "]", e);
-                    }
-                } else if (scopePhase instanceof ScopePhase.CollectorPhase) {
-                    try {
-                        ScopePhase.CollectorPhase collectorPhase = (ScopePhase.CollectorPhase) scopePhase;
-                        // collector phase might not require extra processing, for example, when scrolling
-                        if (!collectorPhase.requiresProcessing()) {
-                            continue;
-                        }
-                        if (scopePhase.scope() != null) {
-                            searchContext.searcher().processingScope(scopePhase.scope());
-                        }
-                        Collector collector = collectorPhase.collector();
-                        searchContext.searcher().search(collectorPhase.query(), collector);
-                        collectorPhase.processCollector(collector);
-                        if (collectorPhase.scope() != null) {
-                            // we mark the scope as processed, so we don't process it again, even if we need to rerun the query...
-                            searchContext.searcher().processedScope();
-                        }
-                    } catch (Exception e) {
-                        throw new QueryPhaseExecutionException(searchContext, "Failed to execute child query [" + scopePhase.query() + "]", e);
-                    }
+                searchContext.searcher().inStage(ContextIndexSearcher.Stage.REWRITE);
+                for (SearchContext.Rewrite rewrite : rewrites) {
+                    rewrite.contextRewrite(searchContext);
                 }
+            } catch (Exception e) {
+                throw new QueryPhaseExecutionException(searchContext, "failed to execute context rewrite", e);
+            } finally {
+                searchContext.searcher().finishStage(ContextIndexSearcher.Stage.REWRITE);
             }
         }

-        searchContext.searcher().processingScope(ContextIndexSearcher.Scopes.MAIN);
+        searchContext.searcher().inStage(ContextIndexSearcher.Stage.MAIN_QUERY);
         try {
             searchContext.queryResult().from(searchContext.from());
             searchContext.queryResult().size(searchContext.size());
@@ -186,7 +128,7 @@ public class QueryPhase implements SearchPhase {
         } catch (Exception e) {
             throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
         } finally {
-            searchContext.searcher().processedScope();
+            searchContext.searcher().finishStage(ContextIndexSearcher.Stage.MAIN_QUERY);
         }

         suggestPhase.execute(searchContext);
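Net effect: query execution now has two fixed stages, REWRITE (each registered SearchContext.Rewrite runs once, refreshing the id cache and executing its own inner query) followed by MAIN_QUERY, instead of an open-ended set of named scopes processed ahead of the main search.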