Support the `has_child`, `has_parent` and `top_children` queries (and filters) in APIs other than just the search API. The queries are now also supported in the count, explain (which returns a dummy explanation) and delete-by-query APIs (`top_children` excepted: it is rejected in delete-by-query).

Migrate SearchContext.Rewrite to Releasable. All the parent/child queries are now implemented as plain Lucene queries, and their state is kept in the Weight.
Completely disable caching for the `has_child` and `has_parent` filters; this has never worked and can never work, since the matching doc ids are cached per segment while the collection of parent ids is gathered at the top (reader) level.
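For illustration, a hedged sketch against the 0.90-era Java client (index, type and field names are made up): the same `has_child` query can now drive the count and delete-by-query APIs, not just search.

    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.query.QueryBuilders;

    public class ParentChildApisSketch {
        public static void run(Client client) {
            // Count parent docs that have at least one matching child.
            long parents = client.prepareCount("my_index")
                    .setQuery(QueryBuilders.hasChildQuery("child_type",
                            QueryBuilders.termQuery("tag", "something")))
                    .execute().actionGet().getCount();
            System.out.println(parents + " parents match");

            // Delete those same parent docs through the delete-by-query API.
            client.prepareDeleteByQuery("my_index")
                    .setQuery(QueryBuilders.hasChildQuery("child_type",
                            QueryBuilders.termQuery("tag", "something")))
                    .execute().actionGet();
        }
    }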

Closes #3822
Martijn van Groningen 2013-09-30 22:50:29 +02:00
parent 7953eed36a
commit 1f117c194c
29 changed files with 1546 additions and 823 deletions

View File

@@ -19,8 +19,11 @@
package org.elasticsearch.action.deletebyquery;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.Filter;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.action.support.replication.TransportShardReplicationOperationAction;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
@@ -31,8 +34,14 @@ import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -41,11 +50,16 @@ import org.elasticsearch.transport.TransportService;
*/
public class TransportShardDeleteByQueryAction extends TransportShardReplicationOperationAction<ShardDeleteByQueryRequest, ShardDeleteByQueryRequest, ShardDeleteByQueryResponse> {
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
@Inject
public TransportShardDeleteByQueryAction(Settings settings, TransportService transportService,
ClusterService clusterService, IndicesService indicesService, ThreadPool threadPool,
ShardStateAction shardStateAction) {
ShardStateAction shardStateAction, ScriptService scriptService, CacheRecycler cacheRecycler) {
super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);
this.scriptService = scriptService;
this.cacheRecycler = cacheRecycler;
}
@Override
@@ -91,10 +105,21 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
@Override
protected PrimaryResponse<ShardDeleteByQueryResponse, ShardDeleteByQueryRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
ShardDeleteByQueryRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types())
.origin(Engine.Operation.Origin.PRIMARY);
indexShard.deleteByQuery(deleteByQuery);
IndexService indexService = indicesService.indexServiceSafe(shardRequest.request.index());
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId);
SearchContext.setCurrent(new DefaultSearchContext(0, new ShardSearchRequest().types(request.types()), null,
indexShard.acquireSearcher("delete_by_query"), indexService, indexShard, scriptService, cacheRecycler));
try {
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types())
.origin(Engine.Operation.Origin.PRIMARY);
SearchContext.current().parsedQuery(new ParsedQuery(deleteByQuery.query(), ImmutableMap.<String, Filter>of()));
indexShard.deleteByQuery(deleteByQuery);
} finally {
SearchContext searchContext = SearchContext.current();
searchContext.clearAndRelease();
SearchContext.removeCurrent();
}
return new PrimaryResponse<ShardDeleteByQueryResponse, ShardDeleteByQueryRequest>(shardRequest.request, new ShardDeleteByQueryResponse(), null);
}
@@ -102,10 +127,21 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
@Override
protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {
ShardDeleteByQueryRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types())
.origin(Engine.Operation.Origin.REPLICA);
indexShard.deleteByQuery(deleteByQuery);
IndexService indexService = indicesService.indexServiceSafe(shardRequest.request.index());
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId);
SearchContext.setCurrent(new DefaultSearchContext(0, new ShardSearchRequest().types(request.types()), null,
indexShard.acquireSearcher("delete_by_query"), indexService, indexShard, scriptService, cacheRecycler));
try {
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.querySource(), request.filteringAliases(), request.types())
.origin(Engine.Operation.Origin.REPLICA);
SearchContext.current().parsedQuery(new ParsedQuery(deleteByQuery.query(), ImmutableMap.<String, Filter>of()));
indexShard.deleteByQuery(deleteByQuery);
} finally {
SearchContext searchContext = SearchContext.current();
searchContext.clearAndRelease();
SearchContext.removeCurrent();
}
}
@Override

View File

@@ -79,7 +79,13 @@ public abstract class MatchDocIdSet extends DocIdSet implements Bits {
*/
public void shortCircuit() {
try {
iterator.advance(DocIdSetIterator.NO_MORE_DOCS);
if (iterator instanceof NoAcceptDocsIterator) {
((NoAcceptDocsIterator) iterator).shortCircuit();
} else if (iterator instanceof BothIterator) {
((BothIterator) iterator).shortCircuit();
} else {
iterator.advance(DocIdSetIterator.NO_MORE_DOCS);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
@@ -124,6 +130,12 @@ public abstract class MatchDocIdSet extends DocIdSet implements Bits {
public long cost() {
return maxDoc;
}
// This is invoked from matchDoc, so we can set doc to maxDoc - 1; when nextDoc / advance is invoked next, doc == maxDoc and we abort
public void shortCircuit() {
doc = maxDoc - 1;
}
}
class FixedBitSetIterator extends FilteredDocIdSetIterator {
@@ -178,5 +190,10 @@ public abstract class MatchDocIdSet extends DocIdSet implements Bits {
public long cost() {
return maxDoc;
}
// This is invoked from matchDoc, so we can set doc to maxDoc - 1; when nextDoc / advance is invoked next, doc == maxDoc and we abort
public void shortCircuit() {
doc = maxDoc - 1;
}
}
}
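To make the short-circuit contract concrete, a minimal hypothetical subclass (the counting logic is invented, but the shape mirrors the ParentDocSet classes elsewhere in this commit): matchDoc() may call shortCircuit() once no further doc can match, and the iterator then reports exhaustion on its next nextDoc() / advance().

    import org.apache.lucene.util.Bits;
    import org.elasticsearch.common.lucene.docset.MatchDocIdSet;

    class FirstNDocSet extends MatchDocIdSet {
        private int remaining;

        FirstNDocSet(int maxDoc, Bits acceptDocs, int n) {
            super(maxDoc, acceptDocs);
            this.remaining = n;
        }

        @Override
        protected boolean matchDoc(int doc) {
            if (remaining == 0) {
                shortCircuit(); // no later doc can match, so abort iteration early
                return false;
            }
            remaining--;
            return true;
        }
    }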

View File

@@ -160,7 +160,11 @@ public class XFilteredQuery extends Query {
*/
@Override
public boolean equals(Object o) {
return delegate.equals(o);
if (!(o instanceof XFilteredQuery)) {
return false;
} else {
return delegate.equals(((XFilteredQuery)o).delegate);
}
}
/**

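A quick hedged illustration of the symmetry bug this fixes (the filter and terms are invented): before the change, x.equals(y) compared the inner delegate against the XFilteredQuery wrapper, which could never be equal.

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.QueryWrapperFilter;
    import org.apache.lucene.search.TermQuery;
    import org.elasticsearch.common.lucene.search.XFilteredQuery;

    public class FilteredQueryEqualsSketch {
        public static void main(String[] args) {
            Filter filter = new QueryWrapperFilter(new MatchAllDocsQuery());
            XFilteredQuery x = new XFilteredQuery(new TermQuery(new Term("f", "v")), filter);
            XFilteredQuery y = new XFilteredQuery(new TermQuery(new Term("f", "v")), filter);
            // Old behaviour: x.equals(y) delegated to FilteredQuery.equals(y) and
            // always failed the class check. Now both sides are unwrapped first.
            System.out.println(x.equals(y) && y.equals(x)); // true with the fix
        }
    }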
View File

@@ -0,0 +1,51 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.recycler;
/**
*/
public final class RecyclerUtils {
private RecyclerUtils() {
}
// Borrowed from IOUtils#close
public static void release(Recycler.V... recyclers) {
Throwable th = null;
for (Recycler.V recycler : recyclers) {
try {
if (recycler != null) {
recycler.release();
}
} catch (Throwable t) {
if (th == null) {
th = t;
}
}
}
if (th != null) {
throw new RuntimeException(th);
}
}
}
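The helper mirrors IOUtils#close: every handle is released even if an earlier release fails, and only the first failure is rethrown. A hedged sketch of the intended call pattern (the fields are hypothetical, but the shape matches the ParentWeight classes further down):

    import org.elasticsearch.ElasticSearchException;
    import org.elasticsearch.common.lease.Releasable;
    import org.elasticsearch.common.recycler.Recycler;
    import org.elasticsearch.common.recycler.RecyclerUtils;

    class RecycledState implements Releasable {
        private final Recycler.V<?> uidToScore;
        private final Recycler.V<?> uidToCount; // may be null; release() skips nulls

        RecycledState(Recycler.V<?> uidToScore, Recycler.V<?> uidToCount) {
            this.uidToScore = uidToScore;
            this.uidToCount = uidToCount;
        }

        @Override
        public boolean release() throws ElasticSearchException {
            // Visits every handle before rethrowing the first failure, if any.
            RecyclerUtils.release(uidToScore, uidToCount);
            return true;
        }
    }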

View File

@@ -26,6 +26,7 @@ import org.elasticsearch.common.component.CloseableComponent;
import org.elasticsearch.index.IndexComponent;
import org.elasticsearch.index.service.IndexService;
import java.io.IOException;
import java.util.List;
/**
@@ -41,7 +42,7 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable<Id
void clear(IndexReader reader);
void refresh(List<AtomicReaderContext> readers) throws Exception;
void refresh(List<AtomicReaderContext> readers) throws IOException;
IdReaderCache reader(AtomicReader reader);
}

View File

@@ -42,6 +42,7 @@ import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.index.shard.service.IndexShard;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
@@ -105,7 +106,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
@SuppressWarnings({"StringEquality"})
@Override
public void refresh(List<AtomicReaderContext> atomicReaderContexts) throws Exception {
public void refresh(List<AtomicReaderContext> atomicReaderContexts) throws IOException {
// do a quick check for the common case, that all are there
if (refreshNeeded(atomicReaderContexts)) {
synchronized (idReaders) {

View File

@@ -21,15 +21,15 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.search.child.HasChildFilter;
import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -59,8 +59,6 @@ public class HasChildFilterParser implements FilterParser {
String childType = null;
int shortCircuitParentDocSet = 8192; // Tests show a cut-off point between 8192 and 16384.
boolean cache = false;
CacheKeyFilter.Key cacheKey = null;
String filterName = null;
String currentFieldName = null;
XContentParser.Token token;
@@ -99,9 +97,9 @@ public class HasChildFilterParser implements FilterParser {
} else if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
// noop to be backwards compatible
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
// noop to be backwards compatible
} else if ("short_circuit_cutoff".equals(currentFieldName)) {
shortCircuitParentDocSet = parser.intValue();
} else {
@@ -131,28 +129,23 @@ public class HasChildFilterParser implements FilterParser {
// wrap the query with type query
query = new XFilteredQuery(query, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
}
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryParsingException(parseContext.index(), "[has_child] Type [" + childType + "] points to a non existent parent type [" + parentType + "]");
}
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
HasChildFilter childFilter = new HasChildFilter(query, parentType, childType, parentFilter, searchContext, shortCircuitParentDocSet);
searchContext.addRewrite(childFilter);
Filter filter = childFilter;
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
Query childrenConstantScoreQuery = new ChildrenConstantScoreQuery(query, parentType, childType, parentFilter, shortCircuitParentDocSet, false);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
parseContext.addNamedQuery(filterName, childrenConstantScoreQuery);
}
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
if (deleteByQuery) {
return new DeleteByQueryWrappingFilter(childrenConstantScoreQuery);
} else {
return new CustomQueryWrappingFilter(childrenConstantScoreQuery);
}
return filter;
}
}
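Since `_cache` and `_cache_key` now parse as no-ops, a caching hint on a has_child filter is still accepted for backwards compatibility but simply ignored. A hedged sketch with the Java filter builder (type and field names invented, and assuming the 0.90-era HasChildFilterBuilder still exposes cache()):

    import org.elasticsearch.index.query.FilterBuilder;
    import org.elasticsearch.index.query.FilterBuilders;
    import org.elasticsearch.index.query.QueryBuilders;

    public class HasChildCacheSketch {
        public static FilterBuilder build() {
            // The cache hint still parses, but no longer has any effect.
            return FilterBuilders.hasChildFilter("child_type",
                    QueryBuilders.termQuery("tag", "something")).cache(true);
        }
    }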

View File

@@ -21,14 +21,14 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.HasChildFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.internal.SearchContext;
@@ -132,22 +132,16 @@ public class HasChildQueryParser implements QueryParser {
throw new QueryParsingException(parseContext.index(), "[has_child] Type [" + childType + "] points to a non existent parent type [" + parentType + "]");
}
// wrap the query with type query
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
}
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
Query query;
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
if (scoreType != null) {
ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, innerQuery, scoreType, shortCircuitParentDocSet);
searchContext.addRewrite(childrenQuery);
query = childrenQuery;
if (!deleteByQuery && scoreType != null) {
query = new ChildrenQuery(parentType, childType, parentFilter, innerQuery, scoreType, shortCircuitParentDocSet);
} else {
HasChildFilter hasChildFilter = new HasChildFilter(innerQuery, parentType, childType, parentFilter, searchContext, shortCircuitParentDocSet);
searchContext.addRewrite(hasChildFilter);
query = new XConstantScoreQuery(hasChildFilter);
query = new ChildrenConstantScoreQuery(innerQuery, parentType, childType, parentFilter, shortCircuitParentDocSet, true);
if (deleteByQuery) {
query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
}
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.NotFilter;
@@ -33,7 +32,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.search.child.HasParentFilter;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.index.search.child.ParentConstantScoreQuery;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -130,18 +131,12 @@ public class HasParentFilterParser implements FilterParser {
// wrap the query with type query
query = new XFilteredQuery(query, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
// In case of delete by query api
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set");
}
Set<String> parentTypes = new HashSet<String>(5);
parentTypes.add(parentType);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper != null) {
DocumentMapper parentTypeDocumentMapper = searchContext.mapperService().documentMapper(parentFieldMapper.type());
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
parentTypes.add(parentFieldMapper.type());
@@ -163,18 +158,18 @@ public class HasParentFilterParser implements FilterParser {
parentFilter = parentsFilter;
}
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
HasParentFilter hasParentFilter = new HasParentFilter(query, parentType, searchContext, childrenFilter);
searchContext.addRewrite(hasParentFilter);
Filter filter = hasParentFilter;
if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
Query parentConstantScoreQuery = new ParentConstantScoreQuery(query, parentType, childrenFilter, false);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
parseContext.addNamedQuery(filterName, parentConstantScoreQuery);
}
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
if (deleteByQuery) {
return new DeleteByQueryWrappingFilter(parentConstantScoreQuery);
} else {
return new CustomQueryWrappingFilter(parentConstantScoreQuery);
}
return filter;
}
}

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.NotFilter;
@@ -32,7 +31,8 @@ import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.search.child.HasParentFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.index.search.child.ParentConstantScoreQuery;
import org.elasticsearch.index.search.child.ParentQuery;
import org.elasticsearch.search.internal.SearchContext;
@@ -129,17 +129,13 @@ public class HasParentQueryParser implements QueryParser {
innerQuery.setBoost(boost);
// wrap the query with type query
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set.");
}
Set<String> parentTypes = new HashSet<String>(5);
parentTypes.add(parentType);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper != null) {
DocumentMapper parentTypeDocumentMapper = searchContext.mapperService().documentMapper(parentFieldMapper.type());
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
parentTypes.add(parentFieldMapper.type());
@@ -162,15 +158,15 @@ public class HasParentQueryParser implements QueryParser {
}
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
Query query;
if (score) {
ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childrenFilter);
searchContext.addRewrite(parentQuery);
query = parentQuery;
if (!deleteByQuery && score) {
query = new ParentQuery(innerQuery, parentType, childrenFilter);
} else {
HasParentFilter hasParentFilter = new HasParentFilter(innerQuery, parentType, searchContext, childrenFilter);
searchContext.addRewrite(hasParentFilter);
query = new XConstantScoreQuery(hasParentFilter);
query = new ParentConstantScoreQuery(innerQuery, parentType, childrenFilter, true);
if (deleteByQuery) {
query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
}
}
query.setBoost(boost);
if (queryName != null) {

View File

@@ -20,7 +20,6 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
@@ -113,6 +112,10 @@ public class TopChildrenQueryParser implements QueryParser {
return null;
}
if ("delete_by_query".equals(SearchContext.current().source())) {
throw new QueryParsingException(parseContext.index(), "[top_children] unsupported in delete_by_query api");
}
DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
if (childDocMapper == null) {
throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");
@@ -125,13 +128,7 @@ public class TopChildrenQueryParser implements QueryParser {
query.setBoost(boost);
// wrap the query with type query
query = new XFilteredQuery(query, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[top_children] Can't execute, search context not set.");
}
TopChildrenQuery childQuery = new TopChildrenQuery(query, childType, parentType, scoreType, factor, incrementalFactor, parseContext.cacheRecycler());
searchContext.addRewrite(childQuery);
if (queryName != null) {
parseContext.addNamedQuery(queryName, childQuery);
}

View File

@@ -0,0 +1,282 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.recycler.RecyclerUtils;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Set;
/**
*
*/
public class ChildrenConstantScoreQuery extends Query {
private final Query originalChildQuery;
private final String parentType;
private final String childType;
private final Filter parentFilter;
private final int shortCircuitParentDocSet;
private final boolean applyAcceptedDocs;
private Query rewrittenChildQuery;
private IndexReader rewriteIndexReader;
public ChildrenConstantScoreQuery(Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, boolean applyAcceptedDocs) {
this.parentFilter = parentFilter;
this.parentType = parentType;
this.childType = childType;
this.originalChildQuery = childQuery;
this.shortCircuitParentDocSet = shortCircuitParentDocSet;
this.applyAcceptedDocs = applyAcceptedDocs;
}
@Override
// See TopChildrenQuery#rewrite
public Query rewrite(IndexReader reader) throws IOException {
if (rewrittenChildQuery == null) {
rewrittenChildQuery = originalChildQuery.rewrite(reader);
rewriteIndexReader = reader;
}
return this;
}
@Override
public void extractTerms(Set<Term> terms) {
rewrittenChildQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
SearchContext searchContext = SearchContext.current();
searchContext.idCache().refresh(searcher.getTopReaderContext().leaves());
Recycler.V<ObjectOpenHashSet<HashedBytesArray>> collectedUids = searchContext.cacheRecycler().hashSet(-1);
UidCollector collector = new UidCollector(parentType, searchContext, collectedUids.v());
final Query childQuery;
if (rewrittenChildQuery == null) {
childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
} else {
assert rewriteIndexReader == searcher.getIndexReader();
childQuery = rewrittenChildQuery;
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.search(childQuery, collector);
int remaining = collectedUids.v().size();
if (remaining == 0) {
return Queries.NO_MATCH_QUERY.createWeight(searcher);
}
Filter shortCircuitFilter = null;
if (remaining == 1) {
BytesRef id = collectedUids.v().iterator().next().value.toBytesRef();
shortCircuitFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
} else if (remaining <= shortCircuitParentDocSet) {
shortCircuitFilter = new ParentIdsFilter(parentType, collectedUids.v().keys, collectedUids.v().allocated);
}
ParentWeight parentWeight = new ParentWeight(parentFilter, shortCircuitFilter, searchContext, collectedUids);
searchContext.addReleasable(parentWeight);
return parentWeight;
}
private final class ParentWeight extends Weight implements Releasable {
private final Filter parentFilter;
private final Filter shortCircuitFilter;
private final SearchContext searchContext;
private final Recycler.V<ObjectOpenHashSet<HashedBytesArray>> collectedUids;
private int remaining;
private float queryNorm;
private float queryWeight;
public ParentWeight(Filter parentFilter, Filter shortCircuitFilter, SearchContext searchContext, Recycler.V<ObjectOpenHashSet<HashedBytesArray>> collectedUids) {
if (applyAcceptedDocs) {
// In case filters are cached, we need to apply deletes, since filters from filter cache didn't apply deletes
this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
this.shortCircuitFilter = new ApplyAcceptedDocsFilter(shortCircuitFilter);
} else {
this.parentFilter = parentFilter;
this.shortCircuitFilter = shortCircuitFilter;
}
this.searchContext = searchContext;
this.collectedUids = collectedUids;
this.remaining = collectedUids.v().size();
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
@Override
public Query getQuery() {
return ChildrenConstantScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
if (remaining == 0) {
return null;
}
if (!applyAcceptedDocs) {
acceptDocs = null;
}
if (shortCircuitFilter != null) {
return ConstantScorer.create(shortCircuitFilter.getDocIdSet(context, acceptDocs), this, queryWeight);
}
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(parentDocIdSet)) {
return null;
}
Bits parentsBits = DocIdSets.toSafeBits(context.reader(), parentDocIdSet);
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
DocIdSet docIdSet = new ParentDocSet(context.reader(), parentsBits, collectedUids.v(), idReaderTypeCache);
return ConstantScorer.create(docIdSet, this, queryWeight);
} else {
return null;
}
}
@Override
public boolean release() throws ElasticSearchException {
RecyclerUtils.release(collectedUids);
return true;
}
private final class ParentDocSet extends MatchDocIdSet {
private final ObjectOpenHashSet<HashedBytesArray> parents;
private final IdReaderTypeCache typeCache;
ParentDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
super(reader.maxDoc(), acceptDocs);
this.parents = parents;
this.typeCache = typeCache;
}
@Override
protected boolean matchDoc(int doc) {
if (remaining == 0) {
shortCircuit();
return false;
}
boolean match = parents.contains(typeCache.idByDoc(doc));
if (match) {
remaining--;
}
return match;
}
}
}
private final static class UidCollector extends ParentIdCollector {
private final ObjectOpenHashSet<HashedBytesArray> collectedUids;
UidCollector(String parentType, SearchContext context, ObjectOpenHashSet<HashedBytesArray> collectedUids) {
super(parentType, context);
this.collectedUids = collectedUids;
}
@Override
public void collect(int doc, HashedBytesArray parentIdByDoc) {
collectedUids.add(parentIdByDoc);
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
ChildrenConstantScoreQuery that = (ChildrenConstantScoreQuery) obj;
if (!originalChildQuery.equals(that.originalChildQuery)) {
return false;
}
if (!childType.equals(that.childType)) {
return false;
}
if (shortCircuitParentDocSet != that.shortCircuitParentDocSet) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = originalChildQuery.hashCode();
result = 31 * result + childType.hashCode();
result += shortCircuitParentDocSet;
return result;
}
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("child_filter[").append(childType).append("/").append(parentType).append("](").append(originalChildQuery).append(')');
return sb.toString();
}
}
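For orientation, a hedged sketch of the execution flow (the type names and the 8192 cutoff mirror the parsers above; everything else is illustrative). All the heavy lifting happens in createWeight(): it runs the child query once over the whole index to collect parent uids, registers the resulting weight with the current SearchContext so the recycled uid set is released with the context, and per-segment scoring then only consults the collected set.

    import java.io.IOException;

    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Weight;
    import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;

    public class ChildrenQueryFlowSketch {
        // Assumes SearchContext.setCurrent(...) has been called, as the search
        // and delete-by-query transport actions in this commit do.
        public static Weight weigh(IndexSearcher searcher, Query childQuery, Filter parentFilter) throws IOException {
            Query query = new ChildrenConstantScoreQuery(childQuery, "parent", "child",
                    parentFilter, 8192, true);
            return searcher.createNormalizedWeight(query); // collects parent uids up front
        }
    }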

View File

@@ -29,12 +29,14 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.recycler.RecyclerUtils;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -53,9 +55,8 @@ import java.util.Set;
* a score based on the aggregated child scores and score type.
*/
// TODO We use a score of 0 to indicate a doc was not scored in uidToScore; this means a score of 0 can be problematic. If we move to HPPC, we can use lset/...
public class ChildrenQuery extends Query implements SearchContext.Rewrite {
public class ChildrenQuery extends Query {
private final SearchContext searchContext;
private final String parentType;
private final String childType;
private final Filter parentFilter;
@@ -64,11 +65,9 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
private final int shortCircuitParentDocSet;
private Query rewrittenChildQuery;
private Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;
private Recycler.V<ObjectIntOpenHashMap<HashedBytesArray>> uidToCount;
private IndexReader rewriteIndexReader;
public ChildrenQuery(SearchContext searchContext, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet) {
this.searchContext = searchContext;
public ChildrenQuery(String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet) {
this.parentType = parentType;
this.childType = childType;
this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
@@ -115,6 +114,7 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
// See TopChildrenQuery#rewrite
public Query rewrite(IndexReader reader) throws IOException {
if (rewrittenChildQuery == null) {
rewriteIndexReader = reader;
rewrittenChildQuery = originalChildQuery.rewrite(reader);
}
return this;
@@ -126,11 +126,14 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
}
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
public Weight createWeight(IndexSearcher searcher) throws IOException {
SearchContext searchContext = SearchContext.current();
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
uidToScore = searchContext.cacheRecycler().objectFloatMap(-1);
Collector collector;
Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore = searchContext.cacheRecycler().objectFloatMap(-1);
Recycler.V<ObjectIntOpenHashMap<HashedBytesArray>> uidToCount = null;
final Collector collector;
switch (scoreType) {
case AVG:
uidToCount = searchContext.cacheRecycler().objectIntMap(-1);
@@ -139,39 +142,22 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
default:
collector = new ChildUidCollector(scoreType, searchContext, parentType, uidToScore.v());
}
Query childQuery;
final Query childQuery;
if (rewrittenChildQuery == null) {
childQuery = rewrittenChildQuery = searchContext.searcher().rewrite(originalChildQuery);
childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
} else {
assert rewriteIndexReader == searcher.getIndexReader();
childQuery = rewrittenChildQuery;
}
searchContext.searcher().search(childQuery, collector);
}
@Override
public void executionDone() {
if (uidToScore != null) {
uidToScore.release();
}
uidToScore = null;
if (uidToCount != null) {
uidToCount.release();
}
uidToCount = null;
}
@Override
public void contextClear() {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
if (uidToScore == null) {
throw new ElasticSearchIllegalStateException("has_child query hasn't executed properly");
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.search(childQuery, collector);
int size = uidToScore.v().size();
if (size == 0) {
uidToScore.release();
if (uidToCount != null) {
uidToCount.release();
}
return Queries.NO_MATCH_QUERY.createWeight(searcher);
}
@@ -184,19 +170,28 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
} else {
parentFilter = this.parentFilter;
}
return new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, size);
ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, searchContext, size, uidToScore, uidToCount);
searchContext.addReleasable(parentWeight);
return parentWeight;
}
final class ParentWeight extends Weight {
private final class ParentWeight extends Weight implements Releasable {
final Weight childWeight;
final Filter parentFilter;
int remaining;
private final Weight childWeight;
private final Filter parentFilter;
private final SearchContext searchContext;
private final Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;
private final Recycler.V<ObjectIntOpenHashMap<HashedBytesArray>> uidToCount;
public ParentWeight(Weight childWeight, Filter parentFilter, int remaining) {
private int remaining;
private ParentWeight(Weight childWeight, Filter parentFilter, SearchContext searchContext, int remaining, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore, Recycler.V<ObjectIntOpenHashMap<HashedBytesArray>> uidToCount) {
this.childWeight = childWeight;
this.parentFilter = parentFilter;
this.searchContext = searchContext;
this.remaining = remaining;
this.uidToScore = uidToScore;
this.uidToCount = uidToCount;
}
@Override
@@ -237,7 +232,13 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
}
}
class ParentScorer extends Scorer {
@Override
public boolean release() throws ElasticSearchException {
RecyclerUtils.release(uidToScore, uidToCount);
return true;
}
private class ParentScorer extends Scorer {
final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
final IdReaderTypeCache idTypeCache;
@@ -320,7 +321,7 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
}
}
final class AvgParentScorer extends ParentScorer {
private final class AvgParentScorer extends ParentScorer {
HashedBytesArray currentUid;
final ObjectIntOpenHashMap<HashedBytesArray> uidToCount;
@@ -369,11 +370,11 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
}
static class ChildUidCollector extends ParentIdCollector {
private static class ChildUidCollector extends ParentIdCollector {
final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
final ScoreType scoreType;
Scorer scorer;
protected final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
private final ScoreType scoreType;
protected Scorer scorer;
ChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, ObjectFloatOpenHashMap<HashedBytesArray> uidToScore) {
super(childType, searchContext);
@@ -414,9 +415,9 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
}
final static class AvgChildUidCollector extends ChildUidCollector {
private final static class AvgChildUidCollector extends ChildUidCollector {
final ObjectIntOpenHashMap<HashedBytesArray> uidToCount;
private final ObjectIntOpenHashMap<HashedBytesArray> uidToCount;
AvgChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, ObjectFloatOpenHashMap<HashedBytesArray> uidToScore, ObjectIntOpenHashMap<HashedBytesArray> uidToCount) {
super(scoreType, searchContext, childType, uidToScore);

View File

@@ -0,0 +1,83 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import java.io.IOException;
/**
* A scorer that wraps a {@link DocIdSetIterator} and emits a constant score.
*/
// Borrowed from ConstantScoreQuery
class ConstantScorer extends Scorer {
static ConstantScorer create(DocIdSet docIdSet, Weight weight, float constantScore) throws IOException {
if (DocIdSets.isEmpty(docIdSet)) {
return null;
}
return new ConstantScorer(docIdSet.iterator(), weight, constantScore);
}
private final DocIdSetIterator docIdSetIterator;
private final float constantScore;
private ConstantScorer(DocIdSetIterator docIdSetIterator, Weight w, float constantScore) {
super(w);
this.constantScore = constantScore;
this.docIdSetIterator = docIdSetIterator;
}
@Override
public int nextDoc() throws IOException {
return docIdSetIterator.nextDoc();
}
@Override
public int docID() {
return docIdSetIterator.docID();
}
@Override
public float score() throws IOException {
assert docIdSetIterator.docID() != NO_MORE_DOCS;
return constantScore;
}
@Override
public int freq() throws IOException {
return 1;
}
@Override
public int advance(int target) throws IOException {
return docIdSetIterator.advance(target);
}
@Override
public long cost() {
return docIdSetIterator.cost();
}
}

View File

@@ -0,0 +1,94 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* Forked from {@link QueryWrapperFilter} to make sure the weight is only created once.
* This filter should never be cached! This filter only exists for internal usage.
*
* @elasticsearch.internal
*/
public class CustomQueryWrappingFilter extends Filter {
private final Query query;
private IndexSearcher searcher;
private Weight weight;
/** Constructs a filter which only matches documents matching
* <code>query</code>.
*/
public CustomQueryWrappingFilter(Query query) {
if (query == null)
throw new NullPointerException("Query may not be null");
this.query = query;
}
/** returns the inner Query */
public final Query getQuery() {
return query;
}
@Override
public DocIdSet getDocIdSet(final AtomicReaderContext context, final Bits acceptDocs) throws IOException {
SearchContext searchContext = SearchContext.current();
if (weight == null) {
assert searcher == null;
IndexSearcher searcher = searchContext.searcher();
weight = searcher.createNormalizedWeight(query);
this.searcher = searcher;
} else {
assert searcher == SearchContext.current().searcher();
}
return new DocIdSet() {
@Override
public DocIdSetIterator iterator() throws IOException {
return weight.scorer(context, true, false, acceptDocs);
}
@Override
public boolean isCacheable() { return false; }
};
}
@Override
public String toString() {
return "CustomQueryWrappingFilter(" + query + ")";
}
@Override
public boolean equals(Object o) {
if (!(o instanceof CustomQueryWrappingFilter))
return false;
return this.query.equals(((CustomQueryWrappingFilter)o).query);
}
@Override
public int hashCode() {
return query.hashCode() ^ 0x823D64C9;
}
}
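The has_child / has_parent filter parsers above return this wrapper around their parent/child query. A hedged sketch of the pattern (the inner query is illustrative): the first getDocIdSet() call builds one normalized weight from the current SearchContext, every segment draws its scorer from that single weight, and the produced doc id sets are deliberately non-cacheable.

    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;

    public class QueryAsFilterSketch {
        public static Filter asFilter(Query parentChildQuery) {
            // Doc id sets produced here are bound to the current SearchContext;
            // isCacheable() returns false, so the filter cache must never see this.
            return new CustomQueryWrappingFilter(parentChildQuery);
        }
    }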

View File

@@ -0,0 +1,125 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* This filter only exists for wrapping parent/child queries in the delete-by-query api.
* Don't use this filter for other purposes.
*
* @elasticsearch.internal
*/
public class DeleteByQueryWrappingFilter extends Filter {
private final Query query;
private IndexSearcher searcher;
private Weight weight;
/** Constructs a filter which only matches documents matching
* <code>query</code>.
*/
public DeleteByQueryWrappingFilter(Query query) {
if (query == null)
throw new NullPointerException("Query may not be null");
this.query = query;
}
/** returns the inner Query */
public final Query getQuery() {
return query;
}
@Override
public DocIdSet getDocIdSet(final AtomicReaderContext context, final Bits acceptDocs) throws IOException {
SearchContext searchContext = SearchContext.current();
if (weight == null) {
assert searcher == null;
searcher = searchContext.searcher();
IndexReader indexReader = SearchContext.current().searcher().getIndexReader();
IndexReader multiReader = null;
try {
if (!contains(indexReader, context)) {
multiReader = new MultiReader(new IndexReader[]{indexReader, context.reader()}, false);
searcher = new IndexSearcher(new MultiReader(indexReader, context.reader()));
}
weight = searcher.createNormalizedWeight(query);
} finally {
if (multiReader != null) {
multiReader.close();
}
}
} else {
IndexReader indexReader = searcher.getIndexReader();
if (!contains(indexReader, context)) {
IndexReader multiReader = new MultiReader(new IndexReader[]{indexReader, context.reader()}, false);
try {
searcher = new IndexSearcher(multiReader);
weight = searcher.createNormalizedWeight(query);
} finally {
multiReader.close();
}
}
}
return new DocIdSet() {
@Override
public DocIdSetIterator iterator() throws IOException {
return weight.scorer(context, true, false, acceptDocs);
}
@Override
public boolean isCacheable() { return false; }
};
}
@Override
public String toString() {
return "DeleteByQueryWrappingFilter(" + query + ")";
}
@Override
public boolean equals(Object o) {
if (!(o instanceof DeleteByQueryWrappingFilter))
return false;
return this.query.equals(((DeleteByQueryWrappingFilter)o).query);
}
@Override
public int hashCode() {
return query.hashCode() ^ 0x823D64CA;
}
static boolean contains(IndexReader indexReader, AtomicReaderContext context) {
for (AtomicReaderContext atomicReaderContext : indexReader.leaves()) {
if (context.reader().getCoreCacheKey().equals(atomicReaderContext.reader().getCoreCacheKey())) {
return true;
}
}
return false;
}
}

View File

@@ -1,202 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
*
*/
public class HasChildFilter extends Filter implements SearchContext.Rewrite {
final Query childQuery;
final String parentType;
final String childType;
final Filter parentFilter;
final SearchContext searchContext;
final int shortCircuitParentDocSet;
Filter shortCircuitFilter;
int remaining;
Recycler.V<ObjectOpenHashSet<HashedBytesArray>> collectedUids;
public HasChildFilter(Query childQuery, String parentType, String childType, Filter parentFilter, SearchContext searchContext, int shortCircuitParentDocSet) {
this.parentFilter = parentFilter;
this.searchContext = searchContext;
this.parentType = parentType;
this.childType = childType;
this.childQuery = childQuery;
this.shortCircuitParentDocSet = shortCircuitParentDocSet;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
HasChildFilter that = (HasChildFilter) obj;
if (!childQuery.equals(that.childQuery)) {
return false;
}
if (!childType.equals(that.childType)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = childQuery.hashCode();
result = 31 * result + childType.hashCode();
return result;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("child_filter[").append(childType).append("/").append(parentType).append("](").append(childQuery).append(')');
return sb.toString();
}
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (collectedUids == null) {
throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
}
if (remaining == 0) {
return null;
}
if (shortCircuitFilter != null) {
return shortCircuitFilter.getDocIdSet(context, acceptDocs);
}
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, null);
if (DocIdSets.isEmpty(parentDocIdSet)) {
return null;
}
Bits parentsBits = DocIdSets.toSafeBits(context.reader(), parentDocIdSet);
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
return new ParentDocSet(context.reader(), parentsBits, collectedUids.v(), idReaderTypeCache);
} else {
return null;
}
}
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
collectedUids = searchContext.cacheRecycler().hashSet(-1);
UidCollector collector = new UidCollector(parentType, searchContext, collectedUids.v());
searchContext.searcher().search(childQuery, collector);
remaining = collectedUids.v().size();
if (remaining == 0) {
shortCircuitFilter = Queries.MATCH_NO_FILTER;
} else if (remaining == 1) {
BytesRef id = collectedUids.v().iterator().next().value.toBytesRef();
shortCircuitFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
} else if (remaining <= shortCircuitParentDocSet) {
shortCircuitFilter = new ParentIdsFilter(parentType, collectedUids.v().keys, collectedUids.v().allocated);
}
}
@Override
public void executionDone() {
if (collectedUids != null) {
collectedUids.release();
}
collectedUids = null;
shortCircuitFilter = null;
}
@Override
public void contextClear() {
}
final class ParentDocSet extends MatchDocIdSet {
final IndexReader reader;
final ObjectOpenHashSet<HashedBytesArray> parents;
final IdReaderTypeCache typeCache;
ParentDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
super(reader.maxDoc(), acceptDocs);
this.reader = reader;
this.parents = parents;
this.typeCache = typeCache;
}
@Override
protected boolean matchDoc(int doc) {
if (remaining == 0) {
shortCircuit();
return false;
}
boolean match = parents.contains(typeCache.idByDoc(doc));
if (match) {
remaining--;
}
return match;
}
}
final static class UidCollector extends ParentIdCollector {
final ObjectOpenHashSet<HashedBytesArray> collectedUids;
UidCollector(String parentType, SearchContext context, ObjectOpenHashSet<HashedBytesArray> collectedUids) {
super(parentType, context);
this.collectedUids = collectedUids;
}
@Override
public void collect(int doc, HashedBytesArray parentIdByDoc) {
collectedUids.add(parentIdByDoc);
}
}
}

View File

@@ -1,182 +0,0 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* A filter that only returns child documents that are linked to the parent documents that matched the inner query.
*/
public class HasParentFilter extends Filter implements SearchContext.Rewrite {
final Query parentQuery;
final String parentType;
final SearchContext context;
final Filter childrenFilter;
Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents;
public HasParentFilter(Query parentQuery, String parentType, SearchContext context, Filter childrenFilter) {
this.parentQuery = parentQuery;
this.parentType = parentType;
this.context = context;
this.childrenFilter = childrenFilter;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
HasParentFilter that = (HasParentFilter) obj;
if (!parentQuery.equals(that.parentQuery)) {
return false;
}
if (!parentType.equals(that.parentType)) {
return false;
}
return true;
}
public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
if (parents == null) {
throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
}
if (parents.v().isEmpty()) {
return null;
}
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(readerContext, null);
if (DocIdSets.isEmpty(childrenDocIdSet)) {
return null;
}
Bits childrenBits = DocIdSets.toSafeBits(readerContext.reader(), childrenDocIdSet);
IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
if (idReaderTypeCache != null) {
return new ChildrenDocSet(readerContext.reader(), childrenBits, parents.v(), idReaderTypeCache);
} else {
return null;
}
}
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
parents = context.cacheRecycler().hashSet(-1);
ParentUidsCollector collector = new ParentUidsCollector(parents.v(), context, parentType);
searchContext.searcher().search(parentQuery, collector);
}
@Override
public void executionDone() {
if (parents != null) {
parents.release();
}
parents = null;
}
@Override
public void contextClear() {
}
final static class ChildrenDocSet extends MatchDocIdSet {
final IndexReader reader;
final ObjectOpenHashSet<HashedBytesArray> parents;
final IdReaderTypeCache idReaderTypeCache;
ChildrenDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
super(reader.maxDoc(), acceptDocs);
this.reader = reader;
this.parents = parents;
this.idReaderTypeCache = idReaderTypeCache;
}
@Override
protected boolean matchDoc(int doc) {
return parents.contains(idReaderTypeCache.parentIdByDoc(doc));
}
}
final static class ParentUidsCollector extends NoopCollector {
final ObjectOpenHashSet<HashedBytesArray> collectedUids;
final SearchContext context;
final String parentType;
IdReaderTypeCache typeCache;
ParentUidsCollector(ObjectOpenHashSet<HashedBytesArray> collectedUids, SearchContext context, String parentType) {
this.collectedUids = collectedUids;
this.context = context;
this.parentType = parentType;
}
public void collect(int doc) throws IOException {
// It can happen that for a particular segment no documents exist for a specific type. This prevents an NPE.
if (typeCache != null) {
collectedUids.add(typeCache.idByDoc(doc));
}
}
@Override
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
}
}
@Override
public int hashCode() {
int result = parentQuery.hashCode();
result = 31 * result + parentType.hashCode();
return result;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("parent_filter[").append(parentType).append("](").append(parentQuery).append(')');
return sb.toString();
}
}
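For contrast with the new Weight-based state handling, the deleted filter above carried its state through the three-phase SearchContext.Rewrite lifecycle. A condensed sketch of the old call order (the caller is illustrative; only the three methods come from the file above):

    // Old lifecycle, condensed from the deleted methods above:
    HasParentFilter filter = new HasParentFilter(parentQuery, "parent", context, childrenFilter);
    filter.contextRewrite(context);   // 1) collect matching parent uids top level
    // ... the main query now calls filter.getDocIdSet(...) once per segment ...
    filter.executionDone();           // 2) release the recycled uid set
    filter.contextClear();            // 3) per-context cleanup (a no-op here)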


@ -0,0 +1,252 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.recycler.RecyclerUtils;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Set;
/**
* A query that only returns child documents that are linked to the parent documents that matched the inner query.
*/
public class ParentConstantScoreQuery extends Query {
private final Query originalParentQuery;
private final String parentType;
private final Filter childrenFilter;
private final boolean applyAcceptedDocs;
private Query rewrittenParentQuery;
private IndexReader rewriteIndexReader;
public ParentConstantScoreQuery(Query parentQuery, String parentType, Filter childrenFilter, boolean applyAcceptedDocs) {
this.originalParentQuery = parentQuery;
this.parentType = parentType;
// In case the childrenFilter is cached.
if (applyAcceptedDocs) {
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
} else {
this.childrenFilter = childrenFilter;
}
this.applyAcceptedDocs = applyAcceptedDocs;
}
@Override
// See TopChildrenQuery#rewrite
public Query rewrite(IndexReader reader) throws IOException {
if (rewrittenParentQuery == null) {
rewrittenParentQuery = originalParentQuery.rewrite(reader);
rewriteIndexReader = reader;
}
return this;
}
@Override
public void extractTerms(Set<Term> terms) {
rewrittenParentQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
SearchContext searchContext = SearchContext.current();
searchContext.idCache().refresh(searcher.getTopReaderContext().leaves());
Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents = searchContext.cacheRecycler().hashSet(-1);
ParentUidsCollector collector = new ParentUidsCollector(parents.v(), searchContext, parentType);
final Query parentQuery;
if (rewrittenParentQuery != null) {
parentQuery = rewrittenParentQuery;
} else {
assert rewriteIndexReader == searcher.getIndexReader();
parentQuery = rewrittenParentQuery = originalParentQuery.rewrite(searcher.getIndexReader());
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.search(parentQuery, collector);
if (parents.v().isEmpty()) {
return Queries.NO_MATCH_QUERY.createWeight(searcher);
}
ChildrenWeight childrenWeight = new ChildrenWeight(searchContext, parents);
searchContext.addReleasable(childrenWeight);
return childrenWeight;
}
private final class ChildrenWeight extends Weight implements Releasable {
private final SearchContext searchContext;
private final Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents;
private float queryNorm;
private float queryWeight;
private ChildrenWeight(SearchContext searchContext, Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents) {
this.searchContext = searchContext;
this.parents = parents;
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
@Override
public Query getQuery() {
return ParentConstantScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
if (!applyAcceptedDocs) {
acceptDocs = null;
}
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(childrenDocIdSet)) {
return null;
}
Bits childrenBits = DocIdSets.toSafeBits(context.reader(), childrenDocIdSet);
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
DocIdSet docIdSet = new ChildrenDocSet(context.reader(), childrenBits, parents.v(), idReaderTypeCache);
return ConstantScorer.create(docIdSet, this, queryWeight);
} else {
return null;
}
}
@Override
public boolean release() throws ElasticSearchException {
RecyclerUtils.release(parents);
return true;
}
private final class ChildrenDocSet extends MatchDocIdSet {
private final ObjectOpenHashSet<HashedBytesArray> parents;
private final IdReaderTypeCache idReaderTypeCache;
ChildrenDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
super(reader.maxDoc(), acceptDocs);
this.parents = parents;
this.idReaderTypeCache = idReaderTypeCache;
}
@Override
protected boolean matchDoc(int doc) {
return parents.contains(idReaderTypeCache.parentIdByDoc(doc));
}
}
}
private final static class ParentUidsCollector extends NoopCollector {
private final ObjectOpenHashSet<HashedBytesArray> collectedUids;
private final SearchContext context;
private final String parentType;
private IdReaderTypeCache typeCache;
ParentUidsCollector(ObjectOpenHashSet<HashedBytesArray> collectedUids, SearchContext context, String parentType) {
this.collectedUids = collectedUids;
this.context = context;
this.parentType = parentType;
}
public void collect(int doc) throws IOException {
// It can happen that for a particular segment no documents exist for a specific type. This prevents an NPE.
if (typeCache != null) {
collectedUids.add(typeCache.idByDoc(doc));
}
}
@Override
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
}
}
@Override
public int hashCode() {
int result = originalParentQuery.hashCode();
result = 31 * result + parentType.hashCode();
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
ParentConstantScoreQuery that = (ParentConstantScoreQuery) obj;
if (!originalParentQuery.equals(that.originalParentQuery)) {
return false;
}
if (!parentType.equals(that.parentType)) {
return false;
}
return true;
}
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("parent_filter[").append(parentType).append("](").append(originalParentQuery).append(')');
return sb.toString();
}
}
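A minimal usage sketch for the query defined above; the term values and the "_type" children filter are illustrative stand-ins for what the has_parent parser would normally supply:

    // Illustrative only: the parser wiring lives outside this hunk.
    Query parentQuery = new TermQuery(new Term("p_field", "p_value1"));
    Filter childrenFilter = new TermFilter(new Term("_type", "child"));
    // applyAcceptedDocs=true wraps childrenFilter in ApplyAcceptedDocsFilter,
    // so a cached filter still honors accepted docs (see the constructor above).
    Query query = new ParentConstantScoreQuery(parentQuery, "parent", childrenFilter, true);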


@ -26,13 +26,15 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.recycler.RecyclerUtils;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.SearchContext;
@ -45,49 +47,21 @@ import java.util.Set;
* using the {@link IdReaderTypeCache}.
*/
// TODO: We use a score of 0 to indicate that a doc was not scored in uidToScore; this means a score of 0 can be problematic. If we move to HPPC, we can use lset/...
public class ParentQuery extends Query implements SearchContext.Rewrite {
public class ParentQuery extends Query {
private final SearchContext searchContext;
private final Query originalParentQuery;
private final String parentType;
private final Filter childrenFilter;
private Query rewrittenParentQuery;
private Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;
private IndexReader rewriteIndexReader;
public ParentQuery(SearchContext searchContext, Query parentQuery, String parentType, Filter childrenFilter) {
this.searchContext = searchContext;
public ParentQuery(Query parentQuery, String parentType, Filter childrenFilter) {
this.originalParentQuery = parentQuery;
this.parentType = parentType;
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
}
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
uidToScore = searchContext.cacheRecycler().objectFloatMap(-1);
ParentUidCollector collector = new ParentUidCollector(uidToScore.v(), searchContext, parentType);
Query parentQuery;
if (rewrittenParentQuery == null) {
parentQuery = rewrittenParentQuery = searchContext.searcher().rewrite(originalParentQuery);
} else {
parentQuery = rewrittenParentQuery;
}
searchContext.searcher().search(parentQuery, collector);
}
@Override
public void executionDone() {
if (uidToScore != null) {
uidToScore.release();
}
uidToScore = null;
}
@Override
public void contextClear() {
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
@ -127,6 +101,7 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
// See TopChildrenQuery#rewrite
public Query rewrite(IndexReader reader) throws IOException {
if (rewrittenParentQuery == null) {
rewriteIndexReader = reader;
rewrittenParentQuery = originalParentQuery.rewrite(reader);
}
return this;
@ -139,24 +114,39 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
if (uidToScore == null) {
throw new ElasticSearchIllegalStateException("has_parent query hasn't executed properly");
SearchContext searchContext = SearchContext.current();
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore = searchContext.cacheRecycler().objectFloatMap(-1);
ParentUidCollector collector = new ParentUidCollector(uidToScore.v(), searchContext, parentType);
final Query parentQuery;
if (rewrittenParentQuery == null) {
parentQuery = rewrittenParentQuery = searcher.rewrite(originalParentQuery);
} else {
assert rewriteIndexReader == searcher.getIndexReader();
parentQuery = rewrittenParentQuery;
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.search(parentQuery, collector);
if (uidToScore.v().isEmpty()) {
uidToScore.release();
return Queries.NO_MATCH_QUERY.createWeight(searcher);
}
return new ChildWeight(rewrittenParentQuery.createWeight(searcher));
ChildWeight childWeight = new ChildWeight(parentQuery.createWeight(searcher), searchContext, uidToScore);
searchContext.addReleasable(childWeight);
return childWeight;
}
static class ParentUidCollector extends NoopCollector {
private static class ParentUidCollector extends NoopCollector {
final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
final SearchContext searchContext;
final String parentType;
private final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
private final SearchContext searchContext;
private final String parentType;
Scorer scorer;
IdReaderTypeCache typeCache;
private Scorer scorer;
private IdReaderTypeCache typeCache;
ParentUidCollector(ObjectFloatOpenHashMap<HashedBytesArray> uidToScore, SearchContext searchContext, String parentType) {
this.uidToScore = uidToScore;
@ -185,12 +175,16 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
}
}
class ChildWeight extends Weight {
private class ChildWeight extends Weight implements Releasable {
private final Weight parentWeight;
private final SearchContext searchContext;
private final Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore;
ChildWeight(Weight parentWeight) {
private ChildWeight(Weight parentWeight, SearchContext searchContext, Recycler.V<ObjectFloatOpenHashMap<HashedBytesArray>> uidToScore) {
this.parentWeight = parentWeight;
this.searchContext = searchContext;
this.uidToScore = uidToScore;
}
@Override
@ -227,16 +221,22 @@ public class ParentQuery extends Query implements SearchContext.Rewrite {
return new ChildScorer(this, uidToScore.v(), childrenDocSet.iterator(), idTypeCache);
}
@Override
public boolean release() throws ElasticSearchException {
RecyclerUtils.release(uidToScore);
return true;
}
}
static class ChildScorer extends Scorer {
private static class ChildScorer extends Scorer {
final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
final DocIdSetIterator childrenIterator;
final IdReaderTypeCache typeCache;
private final ObjectFloatOpenHashMap<HashedBytesArray> uidToScore;
private final DocIdSetIterator childrenIterator;
private final IdReaderTypeCache typeCache;
int currentChildDoc = -1;
float currentScore;
private int currentChildDoc = -1;
private float currentScore;
ChildScorer(Weight weight, ObjectFloatOpenHashMap<HashedBytesArray> uidToScore, DocIdSetIterator childrenIterator, IdReaderTypeCache typeCache) {
super(weight);


@ -25,11 +25,14 @@ import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.search.EmptyScorer;
import org.elasticsearch.common.recycler.Recycler;
import org.elasticsearch.common.recycler.RecyclerUtils;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -51,7 +54,9 @@ import java.util.Set;
* This query is most of the time faster than the {@link ChildrenQuery}. Usually enough parent documents can be returned
* in the first child document query round.
*/
public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
public class TopChildrenQuery extends Query {
private static final ParentDocComparator PARENT_DOC_COMP = new ParentDocComparator();
private final CacheRecycler cacheRecycler;
private final String parentType;
@ -63,7 +68,7 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
// This field will hold the rewritten form of originalChildQuery, so that we can reuse it
private Query rewrittenChildQuery;
private Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs;
private IndexReader rewriteIndexReader;
// Note, the query is expected to already be filtered to only child type docs
public TopChildrenQuery(Query childQuery, String childType, String parentType, ScoreType scoreType, int factor, int incrementalFactor, CacheRecycler cacheRecycler) {
@ -77,13 +82,14 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
}
// Rewrite invocation logic:
// 1) query_then_fetch (default): First contextRewrite and then rewrite is executed
// 2) dfs_query_then_fetch:: First rewrite and then contextRewrite is executed. During query phase rewrite isn't
// 1) query_then_fetch (default): Rewrite is executed as part of the createWeight invocation, when searching child docs.
// 2) dfs_query_then_fetch: First rewrite and then createWeight is executed. During the query phase rewrite isn't
// executed any more because searchContext#queryRewritten() returns true.
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (rewrittenChildQuery == null) {
rewrittenChildQuery = originalChildQuery.rewrite(reader);
rewriteIndexReader = reader;
}
// We can always return the current instance, and we can do this b/c the child query is executed separately
// before the main query (other scope) in a different IS#search() invocation than the main query.
@ -93,30 +99,39 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
}
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
this.parentDocs = cacheRecycler.hashMap(-1);
public void extractTerms(Set<Term> terms) {
rewrittenChildQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
SearchContext searchContext = SearchContext.current();
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
int parentHitsResolved;
int numChildDocs = (searchContext.from() + searchContext.size());
if (numChildDocs == 0) {
numChildDocs = 1;
int requestedDocs = (searchContext.from() + searchContext.size());
if (requestedDocs <= 0) {
requestedDocs = 1;
}
numChildDocs *= factor;
int numChildDocs = requestedDocs * factor;
Query childQuery;
if (rewrittenChildQuery == null) {
childQuery = rewrittenChildQuery = searchContext.searcher().rewrite(originalChildQuery);
childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
} else {
assert rewriteIndexReader == searcher.getIndexReader();
childQuery = rewrittenChildQuery;
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
while (true) {
parentDocs.v().clear();
TopDocs topChildDocs = searchContext.searcher().search(childQuery, numChildDocs);
parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext);
TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);
// check if we found enough docs, if so, break
if (parentHitsResolved >= (searchContext.from() + searchContext.size())) {
if (parentHitsResolved >= requestedDocs) {
break;
}
// if we did not find enough docs, check if it makes sense to search further
@ -129,21 +144,11 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
numChildDocs = topChildDocs.totalHits;
}
}
return new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
}
@Override
public void contextClear() {
}
@Override
public void executionDone() {
if (parentDocs != null) {
parentDocs.release();
}
}
int resolveParentDocuments(TopDocs topDocs, SearchContext context) {
int resolveParentDocuments(TopDocs topDocs, SearchContext context, Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs) {
int parentHitsResolved = 0;
Recycler.V<ObjectObjectOpenHashMap<Object, Recycler.V<IntObjectOpenHashMap<ParentDoc>>>> parentDocsPerReader = cacheRecycler.hashMap(context.searcher().getIndexReader().leaves().size());
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
@ -196,10 +201,9 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
for (int i = 0; i < states.length; i++) {
if (states[i]) {
Recycler.V<IntObjectOpenHashMap<ParentDoc>> value = (Recycler.V<IntObjectOpenHashMap<ParentDoc>>) values[i];
ParentDoc[] parentDocs = value.v().values().toArray(ParentDoc.class);
Arrays.sort(parentDocs, PARENT_DOC_COMP);
this.parentDocs.v().put(keys[i], parentDocs);
ParentDoc[] _parentDocs = value.v().values().toArray(ParentDoc.class);
Arrays.sort(_parentDocs, PARENT_DOC_COMP);
parentDocs.v().put(keys[i], _parentDocs);
value.release();
}
}
@ -207,36 +211,6 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
return parentHitsResolved;
}
private static final ParentDocComparator PARENT_DOC_COMP = new ParentDocComparator();
static class ParentDocComparator implements Comparator<ParentDoc> {
@Override
public int compare(ParentDoc o1, ParentDoc o2) {
return o1.docId - o2.docId;
}
}
static class ParentDoc {
public int docId;
public int count;
public float maxScore = Float.NaN;
public float sumScores = 0;
}
@Override
public void extractTerms(Set<Term> terms) {
rewrittenChildQuery.extractTerms(terms);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
if (parentDocs == null) {
throw new ElasticSearchIllegalStateException("top_children query hasn't executed properly");
}
return new ParentWeight(searcher, rewrittenChildQuery.createWeight(searcher));
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
@ -274,15 +248,14 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
return sb.toString();
}
class ParentWeight extends Weight {
private class ParentWeight extends Weight implements Releasable {
final IndexSearcher searcher;
private final Weight queryWeight;
private final Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs;
final Weight queryWeight;
public ParentWeight(IndexSearcher searcher, Weight queryWeight) throws IOException {
this.searcher = searcher;
public ParentWeight(Weight queryWeight, Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs) throws IOException {
this.queryWeight = queryWeight;
this.parentDocs = parentDocs;
}
public Query getQuery() {
@ -301,6 +274,12 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
// Nothing to normalize
}
@Override
public boolean release() throws ElasticSearchException {
RecyclerUtils.release(parentDocs);
return true;
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
ParentDoc[] readerParentDocs = parentDocs.v().get(context.reader().getCoreCacheKey());
@ -342,7 +321,8 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
}
}
static abstract class ParentScorer extends Scorer {
private static abstract class ParentScorer extends Scorer {
private final ParentDoc spare = new ParentDoc();
protected final ParentDoc[] docs;
protected ParentDoc doc = spare;
@ -388,4 +368,19 @@ public class TopChildrenQuery extends Query implements SearchContext.Rewrite {
return docs.length;
}
}
private static class ParentDocComparator implements Comparator<ParentDoc> {
@Override
public int compare(ParentDoc o1, ParentDoc o2) {
return o1.docId - o2.docId;
}
}
private static class ParentDoc {
public int docId;
public int count;
public float maxScore = Float.NaN;
public float sumScores = 0;
}
}
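The createWeight loop above sizes its child window as requestedDocs * factor and widens it when too few parents resolve. A worked sketch of that arithmetic with illustrative numbers (the factor and incrementalFactor values are assumptions, not taken from this diff):

    // Illustrative sizing only; factor=5 and incrementalFactor=2 are assumed defaults.
    int requestedDocs = Math.max(searchContext.from() + searchContext.size(), 1); // e.g. 0 + 10 -> 10
    int numChildDocs = requestedDocs * factor;                                    // first round: 50 child docs
    // If fewer than requestedDocs parents resolve and more child hits exist,
    // the loop widens the window (by incrementalFactor: 100, 200, ...) but
    // never beyond topChildDocs.totalHits, as shown above.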


@ -13,7 +13,9 @@ import java.util.Set;
/**
* A special query that accepts a top level parent matching query, and returns the nested docs of the matching parent
* doc as well. This is handy when deleting by query.
* doc as well. This is handy when deleting by query; don't use it for other purposes.
*
* @elasticsearch.internal
*/
public class IncludeNestedDocsQuery extends Query {


@ -31,6 +31,7 @@ import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.percolate.PercolateShardRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.index.analysis.AnalysisService;
@ -269,6 +270,11 @@ public class PercolateContext extends SearchContext {
}
// Unused:
@Override
public boolean clearAndRelease() {
throw new UnsupportedOperationException();
}
@Override
public void preProcess() {
throw new UnsupportedOperationException();
@ -284,6 +290,11 @@ public class PercolateContext extends SearchContext {
throw new UnsupportedOperationException();
}
@Override
public String source() {
throw new UnsupportedOperationException();
}
@Override
public ShardSearchRequest request() {
throw new UnsupportedOperationException();
@ -625,12 +636,12 @@ public class PercolateContext extends SearchContext {
}
@Override
public void addRewrite(Rewrite rewrite) {
public void addReleasable(Releasable releasable) {
throw new UnsupportedOperationException();
}
@Override
public List<Rewrite> rewrites() {
public void clearReleasables() {
throw new UnsupportedOperationException();
}


@ -40,8 +40,7 @@ public class ContextIndexSearcher extends IndexSearcher {
public static enum Stage {
NA,
MAIN_QUERY,
REWRITE
MAIN_QUERY
}
private final SearchContext searchContext;
@ -117,11 +116,16 @@ public class ContextIndexSearcher extends IndexSearcher {
@Override
public Weight createNormalizedWeight(Query query) throws IOException {
// if it's the main query and we have dfs data, only then do it
if (dfSource != null && (query == searchContext.query() || query == searchContext.parsedQuery().query())) {
return dfSource.createNormalizedWeight(query);
try {
// if it's the main query and we have dfs data, only then do it
if (dfSource != null && (query == searchContext.query() || query == searchContext.parsedQuery().query())) {
return dfSource.createNormalizedWeight(query);
}
return super.createNormalizedWeight(query);
} catch (Throwable t) {
searchContext.clearReleasables();
throw new RuntimeException(t);
}
return super.createNormalizedWeight(query);
}
@Override
@ -152,37 +156,45 @@ public class ContextIndexSearcher extends IndexSearcher {
}
// we only compute the doc id set once since within a context, we execute the same query always...
if (searchContext.timeoutInMillis() != -1) {
try {
try {
if (searchContext.timeoutInMillis() != -1) {
try {
super.search(leaves, weight, collector);
} catch (TimeLimitingCollector.TimeExceededException e) {
searchContext.queryResult().searchTimedOut(true);
}
} else {
super.search(leaves, weight, collector);
} catch (TimeLimitingCollector.TimeExceededException e) {
searchContext.queryResult().searchTimedOut(true);
}
} else {
super.search(leaves, weight, collector);
}
if (currentState == Stage.MAIN_QUERY) {
if (enableMainDocIdSetCollector) {
enableMainDocIdSetCollector = false;
mainDocIdSetCollector.postCollection();
}
if (queryCollectors != null && !queryCollectors.isEmpty()) {
for (Collector queryCollector : queryCollectors) {
if (queryCollector instanceof XCollector) {
((XCollector) queryCollector).postCollection();
if (currentState == Stage.MAIN_QUERY) {
if (enableMainDocIdSetCollector) {
enableMainDocIdSetCollector = false;
mainDocIdSetCollector.postCollection();
}
if (queryCollectors != null && !queryCollectors.isEmpty()) {
for (Collector queryCollector : queryCollectors) {
if (queryCollector instanceof XCollector) {
((XCollector) queryCollector).postCollection();
}
}
}
}
} finally {
searchContext.clearReleasables();
}
}
@Override
public Explanation explain(Query query, int doc) throws IOException {
if (searchContext.aliasFilter() == null) {
return super.explain(query, doc);
try {
if (searchContext.aliasFilter() == null) {
return super.explain(query, doc);
}
XFilteredQuery filteredQuery = new XFilteredQuery(query, searchContext.aliasFilter());
return super.explain(filteredQuery, doc);
} finally {
searchContext.clearReleasables();
}
XFilteredQuery filteredQuery = new XFilteredQuery(query, searchContext.aliasFilter());
return super.explain(filteredQuery, doc);
}
}
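The try/finally blocks above guarantee that anything registered via addReleasable is released even when a query fails to build or execute. A minimal sketch of such a registration, assuming a SearchContext is in scope:

    // Sketch: ChildrenWeight/ChildWeight/ParentWeight register themselves
    // like this; clearReleasables() then runs in the finally blocks above.
    searchContext.addReleasable(new Releasable() {
        @Override
        public boolean release() throws ElasticSearchException {
            // hand recycled structures (uidToScore, parentDocs, ...) back here
            return true;
        }
    });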


@ -28,6 +28,7 @@ import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
@ -161,7 +162,7 @@ public class DefaultSearchContext extends SearchContext {
private volatile long lastAccessTime;
private List<SearchContext.Rewrite> rewrites = null;
private List<Releasable> clearables = null;
public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget,
@ -192,16 +193,16 @@ public class DefaultSearchContext extends SearchContext {
scanContext.clear();
}
// clear and scope phase we have
if (rewrites != null) {
for (SearchContext.Rewrite rewrite : rewrites) {
rewrite.contextClear();
}
}
searcher.release();
engineSearcher.release();
return true;
}
public boolean clearAndRelease() {
clearReleasables();
return release();
}
/**
* Should be called before executing the main query and after all other parameters have been set.
*/
@ -241,6 +242,10 @@ public class DefaultSearchContext extends SearchContext {
return this.id;
}
public String source() {
return engineSearcher.source();
}
public ShardSearchRequest request() {
return this.request;
}
@ -612,15 +617,31 @@ public class DefaultSearchContext extends SearchContext {
return fetchResult;
}
public void addRewrite(Rewrite rewrite) {
if (this.rewrites == null) {
this.rewrites = new ArrayList<Rewrite>();
@Override
public void addReleasable(Releasable releasable) {
if (clearables == null) {
clearables = new ArrayList<Releasable>();
}
this.rewrites.add(rewrite);
clearables.add(releasable);
}
public List<Rewrite> rewrites() {
return this.rewrites;
@Override
public void clearReleasables() {
if (clearables != null) {
Throwable th = null;
for (Releasable releasable : clearables) {
try {
releasable.release();
} catch (Throwable t) {
if (th == null) {
th = t;
}
}
}
if (th != null) {
throw new RuntimeException(th);
}
}
}
public ScanContext scanContext() {


@ -77,14 +77,7 @@ public abstract class SearchContext implements Releasable {
return current.get();
}
public static interface Rewrite {
void contextRewrite(SearchContext searchContext) throws Exception;
void executionDone();
void contextClear();
}
public abstract boolean clearAndRelease();
/**
* Should be called before executing the main query and after all other parameters have been set.
@ -95,6 +88,8 @@ public abstract class SearchContext implements Releasable {
public abstract long id();
public abstract String source();
public abstract ShardSearchRequest request();
public abstract SearchType searchType();
@ -275,9 +270,9 @@ public abstract class SearchContext implements Releasable {
public abstract FetchSearchResult fetchResult();
public abstract void addRewrite(Rewrite rewrite);
public abstract void addReleasable(Releasable releasable);
public abstract List<Rewrite> rewrites();
public abstract void clearReleasables();
public abstract ScanContext scanContext();


@ -36,7 +36,6 @@ import org.elasticsearch.search.sort.SortParseElement;
import org.elasticsearch.search.sort.TrackScoresParseElement;
import org.elasticsearch.search.suggest.SuggestPhase;
import java.util.List;
import java.util.Map;
/**
@ -88,20 +87,6 @@ public class QueryPhase implements SearchPhase {
public void execute(SearchContext searchContext) throws QueryPhaseExecutionException {
searchContext.queryResult().searchTimedOut(false);
List<SearchContext.Rewrite> rewrites = searchContext.rewrites();
if (rewrites != null) {
try {
searchContext.searcher().inStage(ContextIndexSearcher.Stage.REWRITE);
for (SearchContext.Rewrite rewrite : rewrites) {
rewrite.contextRewrite(searchContext);
}
} catch (Exception e) {
throw new QueryPhaseExecutionException(searchContext, "failed to execute context rewrite", e);
} finally {
searchContext.searcher().finishStage(ContextIndexSearcher.Stage.REWRITE);
}
}
searchContext.searcher().inStage(ContextIndexSearcher.Stage.MAIN_QUERY);
boolean rescore = false;
try {
@ -134,7 +119,7 @@ public class QueryPhase implements SearchPhase {
topDocs = searchContext.searcher().search(query, numDocs);
}
searchContext.queryResult().topDocs(topDocs);
} catch (Exception e) {
} catch (Throwable e) {
throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
} finally {
searchContext.searcher().finishStage(ContextIndexSearcher.Stage.MAIN_QUERY);
@ -144,11 +129,5 @@ public class QueryPhase implements SearchPhase {
}
suggestPhase.execute(searchContext);
facetPhase.execute(searchContext);
if (rewrites != null) {
for (SearchContext.Rewrite rewrite : rewrites) {
rewrite.executionDone();
}
}
}
}


@ -22,6 +22,7 @@ package org.elasticsearch.search.child;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
@ -29,6 +30,7 @@ import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.sort.SortBuilders;
@ -215,125 +217,52 @@ public class SimpleChildQuerySearchTests extends AbstractIntegrationTest {
assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1"));
// TOP CHILDREN QUERY
searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "yellow"))).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "blue"))).execute()
.actionGet();
if (searchResponse.getFailedShards() > 0) {
logger.warn("Failed shards:");
for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
logger.warn("-> {}", shardSearchFailure);
}
}
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "blue")))
.execute().actionGet();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
searchResponse = client().prepareSearch("test").setQuery(topChildrenQuery("child", termQuery("c_field", "red"))).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
assertHitCount(searchResponse, 2l);
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
// HAS CHILD QUERY
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"))).execute()
.actionGet();
if (searchResponse.getFailedShards() > 0) {
logger.warn("Failed shards:");
for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
logger.warn("-> {}", shardSearchFailure);
}
}
assertThat(searchResponse.getFailedShards(), equalTo(0));
// HAS CHILD
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow"))
.execute().actionGet();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"))).execute()
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"))).execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")).execute().actionGet();
assertHitCount(searchResponse, 2l);
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
// HAS CHILD FILTER
// HAS PARENT
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).execute().actionGet();
.setQuery(randomHasParent("parent", "p_field", "p_value2")).execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p1"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue"))))
.execute().actionGet();
if (searchResponse.getFailedShards() > 0) {
logger.warn("Failed shards:");
for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
logger.warn("-> {}", shardSearchFailure);
}
}
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("p2"));
searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red"))))
.execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("p2"), equalTo("p1")));
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
// HAS PARENT FILTER
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value2")))).execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
assertHitCount(searchResponse, 2l);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c4"));
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value1")))).execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
.setQuery(randomHasParent("parent", "p_field", "p_value1")).execute().actionGet();
assertHitCount(searchResponse, 2l);
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c2"));
// HAS PARENT QUERY
searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value2"))).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c3"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c4"));
searchResponse = client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"))).execute()
.actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getFailedShards(), equalTo(0));
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("c1"));
assertThat(searchResponse.getHits().getAt(1).id(), equalTo("c2"));
}
@Test
@ -980,7 +909,6 @@ public class SimpleChildQuerySearchTests extends AbstractIntegrationTest {
@Test
public void testCountApiUsage() throws Exception {
client().admin().indices().prepareCreate("test")
.setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
.execute().actionGet();
@ -994,34 +922,76 @@ public class SimpleChildQuerySearchTests extends AbstractIntegrationTest {
.endObject().endObject()).execute().actionGet();
String parentId = "p1";
client().prepareIndex("test", "parent", parentId).setSource("p_field", "p_value1").execute().actionGet();
client().prepareIndex("test", "child", parentId + "_c1").setSource("c_field1", parentId).setParent(parentId).execute().actionGet();
client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").execute().actionGet();
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
CountResponse countResponse = client().prepareCount("test").setQuery(topChildrenQuery("child", termQuery("c_field1", "1")))
CountResponse countResponse = client().prepareCount("test").setQuery(topChildrenQuery("child", termQuery("c_field", "1")))
.execute().actionGet();
assertThat(countResponse.getFailedShards(), equalTo(1));
assertThat(countResponse.getShardFailures()[0].reason().contains("top_children query hasn't executed properly"), equalTo(true));
assertHitCount(countResponse, 1l);
countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field1", "2")).scoreType("max"))
countResponse = client().prepareCount("test").setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.execute().actionGet();
assertThat(countResponse.getFailedShards(), equalTo(1));
assertThat(countResponse.getShardFailures()[0].reason().contains("has_child query hasn't executed properly"), equalTo(true));
assertHitCount(countResponse, 1l);
countResponse = client().prepareCount("test").setQuery(hasParentQuery("parent", termQuery("p_field1", "1")).scoreType("score"))
countResponse = client().prepareCount("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score"))
.execute().actionGet();
assertThat(countResponse.getFailedShards(), equalTo(1));
assertThat(countResponse.getShardFailures()[0].reason().contains("has_parent query hasn't executed properly"), equalTo(true));
assertHitCount(countResponse, 1l);
countResponse = client().prepareCount("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field1", "2"))))
countResponse = client().prepareCount("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "1"))))
.execute().actionGet();
assertThat(countResponse.getFailedShards(), equalTo(1));
assertThat(countResponse.getShardFailures()[0].reason().contains("has_child filter hasn't executed properly"), equalTo(true));
assertHitCount(countResponse, 1l);
countResponse = client().prepareCount("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field1", "1"))))
countResponse = client().prepareCount("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "1"))))
.execute().actionGet();
assertThat(countResponse.getFailedShards(), equalTo(1));
assertThat(countResponse.getShardFailures()[0].reason().contains("has_parent filter hasn't executed properly"), equalTo(true));
assertHitCount(countResponse, 1l);
}
@Test
public void testExplainUsage() throws Exception {
client().admin().indices().prepareCreate("test")
.setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
.execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().admin()
.indices()
.preparePutMapping("test")
.setType("child")
.setSource(
jsonBuilder().startObject().startObject("type").startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
String parentId = "p1";
client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").execute().actionGet();
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client().prepareSearch("test")
.setExplain(true)
.setQuery(topChildrenQuery("child", termQuery("c_field", "1")))
.execute().actionGet();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
searchResponse = client().prepareSearch("test")
.setExplain(true)
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.execute().actionGet();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
searchResponse = client().prepareSearch("test")
.setExplain(true)
.setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score"))
.execute().actionGet();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId)
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.execute().actionGet();
assertThat(explainResponse.isExists(), equalTo(true));
assertThat(explainResponse.getExplanation().getDescription(), equalTo("not implemented yet..."));
}
@Test
@ -1602,4 +1572,204 @@ public class SimpleChildQuerySearchTests extends AbstractIntegrationTest {
assertHitCount(response, 2l);
}
@Test
public void testHasChildNotBeingCached() throws ElasticSearchException, IOException {
client().admin().indices().prepareCreate("test")
.setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
).execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
// index simple data
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").execute().actionGet();
client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").execute().actionGet();
client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").execute().actionGet();
client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").execute().actionGet();
client().prepareIndex("test", "parent", "p5").setSource("p_field", "p_value5").execute().actionGet();
client().prepareIndex("test", "parent", "p6").setSource("p_field", "p_value6").execute().actionGet();
client().prepareIndex("test", "parent", "p7").setSource("p_field", "p_value7").execute().actionGet();
client().prepareIndex("test", "parent", "p8").setSource("p_field", "p_value8").execute().actionGet();
client().prepareIndex("test", "parent", "p9").setSource("p_field", "p_value9").execute().actionGet();
client().prepareIndex("test", "parent", "p10").setSource("p_field", "p_value10").execute().actionGet();
client().prepareIndex("test", "child", "c1").setParent("p1").setSource("c_field", "blue").execute().actionGet();
client().admin().indices().prepareFlush("test").execute().actionGet();
client().admin().indices().prepareRefresh("test").execute().actionGet();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).cache(true)))
.execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
client().prepareIndex("test", "child", "c2").setParent("p2").setSource("c_field", "blue").execute().actionGet();
client().admin().indices().prepareRefresh("test").execute().actionGet();
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).cache(true)))
.execute().actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
}
@Test
public void testDeleteByQuery_has_child() throws Exception {
client().admin().indices().prepareCreate("test")
.setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put("index.refresh_interval", "-1")
).execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
// index simple data
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").execute().actionGet();
client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").execute().actionGet();
client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").execute().actionGet();
client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").execute().actionGet();
client().admin().indices().prepareFlush("test").execute().actionGet();
client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").execute().actionGet();
client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").execute().actionGet();
client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").execute().actionGet();
client().admin().indices().prepareFlush("test").execute().actionGet();
client().prepareIndex("test", "child", "c5").setSource("c_field", "blue").setParent("p3").execute().actionGet();
client().prepareIndex("test", "child", "c6").setSource("c_field", "red").setParent("p3").execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
// p4 will not be found via search api, but will be deleted via delete_by_query api!
client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").execute().actionGet();
client().prepareIndex("test", "child", "c7").setSource("c_field", "blue").setParent("p4").execute().actionGet();
client().prepareIndex("test", "child", "c8").setSource("c_field", "red").setParent("p4").execute().actionGet();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(randomHasChild("child", "c_field", "blue"))
.execute().actionGet();
assertHitCount(searchResponse, 2l);
client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).execute().actionGet();
client().admin().indices().prepareRefresh("test").execute().actionGet();
searchResponse = client().prepareSearch("test")
.setQuery(randomHasChild("child", "c_field", "blue"))
.execute().actionGet();
assertHitCount(searchResponse, 0l);
}
@Test
public void testDeleteByQuery_has_child_SingleRefresh() throws Exception {
client().admin().indices().prepareCreate("test")
.setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put("index.refresh_interval", "-1")
).execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
// index simple data
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").execute().actionGet();
client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").execute().actionGet();
client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").execute().actionGet();
client().admin().indices().prepareFlush().execute().actionGet();
client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").execute().actionGet();
client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").execute().actionGet();
client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").execute().actionGet();
client().prepareIndex("test", "parent", "p3").setSource("p_field", "p_value3").execute().actionGet();
client().prepareIndex("test", "child", "c5").setSource("c_field", "blue").setParent("p3").execute().actionGet();
client().prepareIndex("test", "child", "c6").setSource("c_field", "red").setParent("p3").execute().actionGet();
client().prepareIndex("test", "parent", "p4").setSource("p_field", "p_value4").execute().actionGet();
client().prepareIndex("test", "child", "c7").setSource("c_field", "blue").setParent("p4").execute().actionGet();
client().prepareIndex("test", "child", "c8").setSource("c_field", "red").setParent("p4").execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(randomHasChild("child", "c_field", "blue"))
.execute().actionGet();
assertHitCount(searchResponse, 3l);
client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).execute().actionGet();
client().admin().indices().prepareRefresh("test").execute().actionGet();
searchResponse = client().prepareSearch("test")
.setQuery(randomHasChild("child", "c_field", "blue"))
.execute().actionGet();
assertHitCount(searchResponse, 0l);
}
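
// Randomly picks one of three equivalent ways to express the has_child constraint:
// a has_child filter wrapped in a constant_score query, a has_child filter inside a
// filtered query, or a plain has_child query.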
private QueryBuilder randomHasChild(String type, String field, String value) {
if (randomBoolean()) {
if (randomBoolean()) {
return constantScoreQuery(hasChildFilter(type, termQuery(field, value)));
} else {
return filteredQuery(matchAllQuery(), hasChildFilter(type, termQuery(field, value)));
}
} else {
return hasChildQuery(type, termQuery(field, value));
}
}
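
// Tests that the has_parent query and filter are honoured by the delete by query api:
// every child whose parent matches the query is deleted.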
@Test
public void testDeleteByQuery_has_parent() throws Exception {
client().admin().indices().prepareCreate("test")
.setSettings(
ImmutableSettings.settingsBuilder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 1)
.put("index.refresh_interval", "-1")
).execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).execute().actionGet();
// index simple data
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").execute().actionGet();
client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").execute().actionGet();
client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").execute().actionGet();
client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").execute().actionGet();
client().admin().indices().prepareFlush("test").execute().actionGet();
client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").execute().actionGet();
client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").execute().actionGet();
client().admin().indices().prepareRefresh().execute().actionGet();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(randomHasParent("parent", "p_field", "p_value2"))
.execute().actionGet();
assertHitCount(searchResponse, 2l);
client().prepareDeleteByQuery("test")
.setQuery(randomHasParent("parent", "p_field", "p_value2"))
.execute().actionGet();
client().admin().indices().prepareRefresh("test").execute().actionGet();
searchResponse = client().prepareSearch("test")
.setQuery(randomHasParent("parent", "p_field", "p_value2"))
.execute().actionGet();
assertHitCount(searchResponse, 0l);
}
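
// Like randomHasChild, but randomizes between the has_parent filter and query variants.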
private QueryBuilder randomHasParent(String type, String field, String value) {
if (randomBoolean()) {
if (randomBoolean()) {
return constantScoreQuery(hasParentFilter(type, termQuery(field, value)));
} else {
return filteredQuery(matchAllQuery(), hasParentFilter(type, termQuery(field, value)));
}
} else {
return hasParentQuery(type, termQuery(field, value));
}
}
}

View File

@@ -176,7 +176,7 @@ public class SimpleValidateQueryTests extends AbstractIntegrationTest {
"child-type",
QueryBuilders.fieldQuery("foo", "1")
)
), equalTo("filtered(foo:1)->child_filter[child-type/type1](filtered(foo:1)->cache(_type:child-type))"));
), equalTo("filtered(foo:1)->CustomQueryWrappingFilter(child_filter[child-type/type1](filtered(foo:1)->cache(_type:child-type)))"));
assertExplanation(QueryBuilders.filteredQuery(
QueryBuilders.termQuery("foo", "1"),