Removed `execution_type` for has_child and has_parent.

commit 9c3a86875b
parent 20ce01bd53
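For orientation, a minimal caller-level sketch of the API change, assembled from calls that appear verbatim in the benchmark and test hunks below (the index name is illustrative):

    // Before this commit: callers could pick the low level child/parent
    // filtering implementation per request ("uid" or "bitset").
    client.prepareSearch(indexName)
            .setQuery(filteredQuery(
                    matchAllQuery(),
                    hasChildFilter("child", termQuery("tag", "tag1")).executionType("uid")))
            .execute().actionGet();

    // After this commit: the executionType(...) setter is gone from the
    // builders and the "uid" implementation is always used.
    client.prepareSearch(indexName)
            .setQuery(filteredQuery(
                    matchAllQuery(),
                    hasChildFilter("child", termQuery("tag", "tag1"))))
            .execute().actionGet();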
@@ -32,7 +32,6 @@ public class HasChildFilterBuilder extends BaseFilterBuilder {
     private final QueryBuilder queryBuilder;
     private String childType;
     private String filterName;
-    private String executionType;
 
     public HasChildFilterBuilder(String type, QueryBuilder queryBuilder) {
         this.childType = type;
@@ -54,16 +53,6 @@ public class HasChildFilterBuilder extends BaseFilterBuilder {
         return this;
     }
 
-    /**
-     * Expert: Sets the low level child to parent filtering implementation. Can be: 'bitset' or 'uid'
-     * <p/>
-     * This option is experimental and will be removed.
-     */
-    public HasChildFilterBuilder executionType(String executionType) {
-        this.executionType = executionType;
-        return this;
-    }
-
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(HasChildFilterParser.NAME);
@@ -78,9 +67,6 @@ public class HasChildFilterBuilder extends BaseFilterBuilder {
         if (filterName != null) {
             builder.field("_name", filterName);
         }
-        if (executionType != null) {
-            builder.field("execution_type", executionType);
-        }
         builder.endObject();
     }
 }

@@ -56,7 +56,6 @@ public class HasChildFilterParser implements FilterParser {
         boolean queryFound = false;
         String childType = null;
 
-        String executionType = "uid";
         String filterName = null;
         String currentFieldName = null;
         XContentParser.Token token;
@@ -94,8 +93,6 @@ public class HasChildFilterParser implements FilterParser {
                     throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_child] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
-                } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {// This option is experimental and will most likely be removed.
-                    executionType = parser.text();
                 } else {
                     throw new QueryParsingException(parseContext.index(), "[has_child] filter does not support [" + currentFieldName + "]");
                 }
@@ -125,7 +122,7 @@ public class HasChildFilterParser implements FilterParser {
 
         SearchContext searchContext = SearchContext.current();
 
-        HasChildFilter childFilter = HasChildFilter.create(query, parentType, childType, searchContext, executionType);
+        HasChildFilter childFilter = HasChildFilter.create(query, parentType, childType, searchContext);
         searchContext.addRewrite(childFilter);
 
         if (filterName != null) {

@@ -36,8 +36,6 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
 
     private String scoreType;
 
-    private String executionType;
-
     public HasChildQueryBuilder(String type, QueryBuilder queryBuilder) {
         this.childType = type;
         this.queryBuilder = queryBuilder;
@@ -60,17 +58,6 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
         return this;
     }
 
-    /**
-     * Expert: Sets the low level child to parent filtering implementation. Can be: 'bitset' or 'uid'
-     * Only applicable when score_type is set to none.
-     * <p/>
-     * This option is experimental and will be removed.
-     */
-    public HasChildQueryBuilder executionType(String executionType) {
-        this.executionType = executionType;
-        return this;
-    }
-
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(HasChildQueryParser.NAME);
@@ -83,9 +70,6 @@ public class HasChildQueryBuilder extends BaseQueryBuilder implements BoostableQ
         if (scoreType != null) {
             builder.field("score_type", scoreType);
         }
-        if (executionType != null) {
-            builder.field("execution_type", executionType);
-        }
         builder.endObject();
     }
 }

@@ -58,7 +58,6 @@ public class HasChildQueryParser implements QueryParser {
         float boost = 1.0f;
         String childType = null;
         ScoreType scoreType = null;
-        String executionType = "uid";
 
         String currentFieldName = null;
         XContentParser.Token token;
@@ -82,8 +81,6 @@ public class HasChildQueryParser implements QueryParser {
             } else if (token.isValue()) {
                 if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                     childType = parser.text();
-                } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {// This option is experimental and will most likely be removed.
-                    executionType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
                     throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_child] query has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
@@ -128,7 +125,7 @@ public class HasChildQueryParser implements QueryParser {
             searchContext.addRewrite(childrenQuery);
             query = childrenQuery;
         } else {
-            HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, parentType, childType, searchContext, executionType);
+            HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, parentType, childType, searchContext);
             searchContext.addRewrite(hasChildFilter);
             query = new ConstantScoreQuery(hasChildFilter);
         }

@@ -32,7 +32,6 @@ public class HasParentFilterBuilder extends BaseFilterBuilder {
     private final FilterBuilder filterBuilder;
     private final String parentType;
     private String filterName;
-    private String executionType;
 
     /**
      * @param parentType The parent type
@@ -59,16 +58,6 @@ public class HasParentFilterBuilder extends BaseFilterBuilder {
         return this;
     }
 
-    /**
-     * Expert: Sets the low level parent to child filtering implementation. Can be: 'bitset' or 'uid'
-     * <p/>
-     * This option is experimental and will be removed.
-     */
-    public HasParentFilterBuilder executionType(String executionType) {
-        this.executionType = executionType;
-        return this;
-    }
-
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(HasParentFilterParser.NAME);
@@ -83,9 +72,6 @@ public class HasParentFilterBuilder extends BaseFilterBuilder {
         if (filterName != null) {
             builder.field("_name", filterName);
         }
-        if (executionType != null) {
-            builder.field("execution_type", executionType);
-        }
         builder.endObject();
     }
 }

@@ -55,7 +55,6 @@ public class HasParentFilterParser implements FilterParser {
         Query query = null;
         boolean queryFound = false;
         String parentType = null;
-        String executionType = "uid";
 
         String filterName = null;
         String currentFieldName = null;
@@ -93,8 +92,6 @@ public class HasParentFilterParser implements FilterParser {
                     throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] filter has been removed, use a filter as a facet_filter in the relevant global facet");
                 } else if ("_name".equals(currentFieldName)) {
                     filterName = parser.text();
-                } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) { // This option is experimental and will most likely be removed.
-                    executionType = parser.text();
                 } else {
                     throw new QueryParsingException(parseContext.index(), "[has_parent] filter does not support [" + currentFieldName + "]");
                 }
@@ -121,7 +118,7 @@ public class HasParentFilterParser implements FilterParser {
 
         SearchContext searchContext = SearchContext.current();
 
-        HasParentFilter parentFilter = HasParentFilter.create(executionType, query, parentType, searchContext);
+        HasParentFilter parentFilter = HasParentFilter.create(query, parentType, searchContext);
         searchContext.addRewrite(parentFilter);
 
         if (filterName != null) {

@@ -31,7 +31,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
     private final QueryBuilder queryBuilder;
     private final String parentType;
     private String scoreType;
-    private String executionType;
     private float boost = 1.0f;
 
     /**
@@ -56,17 +55,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
         return this;
     }
 
-    /**
-     * Expert: Sets the low level child to parent filtering implementation. Can be: 'bitset' or 'uid'
-     * <p/>
-     * Only applicable when score_type is set to none.
-     * This option is experimental and will be removed.
-     */
-    public HasParentQueryBuilder executionType(String executionType) {
-        this.executionType = executionType;
-        return this;
-    }
-
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(HasParentQueryParser.NAME);
         builder.field("query");
@@ -75,9 +63,6 @@ public class HasParentQueryBuilder extends BaseQueryBuilder implements Boostable
         if (scoreType != null) {
             builder.field("score_type", scoreType);
         }
-        if (executionType != null) {
-            builder.field("execution_type", executionType);
-        }
         if (boost != 1.0f) {
             builder.field("boost", boost);
         }

@@ -60,7 +60,6 @@ public class HasParentQueryParser implements QueryParser {
         float boost = 1.0f;
         String parentType = null;
         boolean score = false;
-        String executionType = "uid";
 
         String currentFieldName = null;
         XContentParser.Token token;
@@ -85,8 +84,6 @@ public class HasParentQueryParser implements QueryParser {
                     parentType = parser.text();
                 } else if ("_scope".equals(currentFieldName)) {
                     throw new QueryParsingException(parseContext.index(), "the [_scope] support in [has_parent] query has been removed, use a filter as a facet_filter in the relevant global facet");
-                } else if ("execution_type".equals(currentFieldName) || "executionType".equals(currentFieldName)) {
-                    executionType = parser.text();
                 } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
                     String scoreTypeValue = parser.text();
                     if ("score".equals(scoreTypeValue)) {
@@ -153,7 +150,7 @@ public class HasParentQueryParser implements QueryParser {
             searchContext.addRewrite(parentQuery);
             query = parentQuery;
         } else {
-            HasParentFilter hasParentFilter = HasParentFilter.create(executionType, innerQuery, parentType, searchContext);
+            HasParentFilter hasParentFilter = HasParentFilter.create(innerQuery, parentType, searchContext);
             searchContext.addRewrite(hasParentFilter);
             query = new ConstantScoreQuery(hasParentFilter);
         }

@@ -1,112 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.search.child;
-
-import org.apache.lucene.index.AtomicReader;
-import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.util.FixedBitSet;
-import org.elasticsearch.common.bytes.HashedBytesArray;
-import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.index.cache.id.IdReaderTypeCache;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- *
- */
-public class ChildCollector extends Collector {
-
-    private final String parentType;
-
-    private final SearchContext context;
-
-    private final Tuple<AtomicReader, IdReaderTypeCache>[] readers;
-
-    private final Map<Object, FixedBitSet> parentDocs;
-
-    private IdReaderTypeCache typeCache;
-
-    public ChildCollector(String parentType, SearchContext context) {
-        this.parentType = parentType;
-        this.context = context;
-        this.parentDocs = new HashMap<Object, FixedBitSet>();
-
-        // create a specific type map lookup for faster lookup operations per doc
-        this.readers = new Tuple[context.searcher().getIndexReader().leaves().size()];
-        for (int i = 0; i < readers.length; i++) {
-            AtomicReaderContext readerContext = context.searcher().getIndexReader().leaves().get(i);
-            readers[i] = new Tuple<AtomicReader, IdReaderTypeCache>(
-                    readerContext.reader(), context.idCache().reader(readerContext.reader()).type(parentType)
-            );
-        }
-    }
-
-    public Map<Object, FixedBitSet> parentDocs() {
-        return this.parentDocs;
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-    }
-
-    @Override
-    public void collect(int doc) throws IOException {
-        if (typeCache == null) {
-            return;
-        }
-
-        HashedBytesArray parentId = typeCache.parentIdByDoc(doc);
-        if (parentId == null) {
-            return;
-        }
-        for (Tuple<AtomicReader, IdReaderTypeCache> tuple : readers) {
-            AtomicReader indexReader = tuple.v1();
-            IdReaderTypeCache idReaderTypeCache = tuple.v2();
-            if (idReaderTypeCache == null) { // might be if we don't have that doc with that type in this reader
-                continue;
-            }
-            int parentDocId = idReaderTypeCache.docById(parentId);
-            if (parentDocId != -1 && (indexReader.getLiveDocs() == null || indexReader.getLiveDocs().get(parentDocId))) {
-                FixedBitSet docIdSet = parentDocs().get(indexReader.getCoreCacheKey());
-                if (docIdSet == null) {
-                    docIdSet = new FixedBitSet(indexReader.maxDoc());
-                    parentDocs.put(indexReader.getCoreCacheKey(), docIdSet);
-                }
-                docIdSet.set(parentDocId);
-                return;
-            }
-        }
-    }
-
-    @Override
-    public void setNextReader(AtomicReaderContext readerContext) throws IOException {
-        typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
-    }
-
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-        return true;
-    }
-}

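The deleted ChildCollector was only driven from the bitset execution path of HasChildFilter, which is removed in the next file. A condensed sketch of that now-removed flow, using only calls visible in this diff:

    // Removed code, reproduced here only to show how the deleted pieces fit
    // together: contextRewrite() ran the child query once per search and the
    // collector recorded matching parents as per-segment FixedBitSets keyed
    // by each segment's core cache key.
    searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
    ChildCollector collector = new ChildCollector(parentType, searchContext);
    searchContext.searcher().search(childQuery, collector);
    Map<Object, FixedBitSet> parentDocs = collector.parentDocs();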
@@ -26,7 +26,6 @@ import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.Nullable;
@@ -37,7 +36,6 @@ import org.elasticsearch.index.cache.id.IdReaderTypeCache;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.Map;
 
 /**
  *
@@ -63,47 +61,8 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
         return sb.toString();
     }
 
-    public static HasChildFilter create(Query childQuery, String parentType, String childType, SearchContext searchContext, String executionType) {
-        // This mechanism is experimental and will most likely be removed.
-        if ("bitset".equals(executionType)) {
-            return new Bitset(childQuery, parentType, childType, searchContext);
-        } else if ("uid".endsWith(executionType)) {
-            return new Uid(childQuery, parentType, childType, searchContext);
-        }
-        throw new ElasticSearchIllegalStateException("Illegal has_child execution type: " + executionType);
-    }
-
-    static class Bitset extends HasChildFilter {
-
-        private Map<Object, FixedBitSet> parentDocs;
-
-        public Bitset(Query childQuery, String parentType, String childType, SearchContext searchContext) {
-            super(childQuery, parentType, childType, searchContext);
-        }
-
-        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-            if (parentDocs == null) {
-                throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
-            }
-
-            // np need to use acceptDocs, since the parentDocs were collected with a collector, which means those
-            // collected docs are not deleted
-            // ok to return null
-            return parentDocs.get(context.reader().getCoreCacheKey());
-        }
-
-        @Override
-        public void contextRewrite(SearchContext searchContext) throws Exception {
-            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
-            ChildCollector collector = new ChildCollector(parentType, searchContext);
-            searchContext.searcher().search(childQuery, collector);
-            this.parentDocs = collector.parentDocs();
-        }
-
-        @Override
-        public void contextClear() {
-            parentDocs = null;
-        }
+    public static HasChildFilter create(Query childQuery, String parentType, String childType, SearchContext searchContext) {
+        return new Uid(childQuery, parentType, childType, searchContext);
     }
 
     static class Uid extends HasChildFilter {

@@ -20,28 +20,22 @@
 package org.elasticsearch.index.search.child;
 
 import gnu.trove.set.hash.THashSet;
-import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.bytes.HashedBytesArray;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.Map;
 
-import static com.google.common.collect.Maps.newHashMap;
-
 /**
  * A filter that only return child documents that are linked to the parent documents that matched with the inner query.
@@ -65,14 +59,8 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Re
         return sb.toString();
     }
 
-    public static HasParentFilter create(String executionType, Query query, String parentType, SearchContext context) {
-        // This mechanism is experimental and will most likely be removed.
-        if ("bitset".equals(executionType)) {
-            return new Bitset(query, parentType, context);
-        } else if ("uid".equals(executionType)) {
-            return new Uid(query, parentType, context);
-        }
-        throw new ElasticSearchIllegalStateException("Illegal has_parent execution type: " + executionType);
+    public static HasParentFilter create(Query query, String parentType, SearchContext context) {
+        return new Uid(query, parentType, context);
     }
 
     static class Uid extends HasParentFilter {
@@ -162,101 +150,5 @@ public abstract class HasParentFilter extends Filter implements SearchContext.Re
 
     }
 
-    static class Bitset extends HasParentFilter {
-
-        Map<Object, FixedBitSet> parentDocs;
-
-        Bitset(Query query, String parentType, SearchContext context) {
-            super(query, parentType, context);
-        }
-
-        public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
-            if (parentDocs == null) {
-                throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
-            }
-
-            IdReaderTypeCache currentTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
-            if (currentTypeCache == null) {
-                return null;
-            } else {
-                return new ChildrenDocSet(readerContext.reader(), currentTypeCache, acceptDocs, parentDocs, context, parentType);
-            }
-        }
-
-        @Override
-        public void contextRewrite(SearchContext searchContext) throws Exception {
-            searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
-            ParentDocsCollector collector = new ParentDocsCollector();
-            searchContext.searcher().search(parentQuery, collector);
-            parentDocs = collector.segmentResults;
-        }
-
-        @Override
-        public void contextClear() {
-            parentDocs = null;
-        }
-
-        static class ChildrenDocSet extends MatchDocIdSet {
-
-            final IdReaderTypeCache currentTypeCache;
-            final AtomicReader currentReader;
-            final Tuple<AtomicReader, IdReaderTypeCache>[] readersToTypeCache;
-            final Map<Object, FixedBitSet> parentDocs;
-
-            ChildrenDocSet(AtomicReader currentReader, IdReaderTypeCache currentTypeCache, @Nullable Bits acceptDocs,
-                           Map<Object, FixedBitSet> parentDocs, SearchContext context, String parentType) {
-                super(currentReader.maxDoc(), acceptDocs);
-                this.currentTypeCache = currentTypeCache;
-                this.currentReader = currentReader;
-                this.parentDocs = parentDocs;
-                this.readersToTypeCache = new Tuple[context.searcher().getIndexReader().leaves().size()];
-                for (int i = 0; i < readersToTypeCache.length; i++) {
-                    AtomicReader reader = context.searcher().getIndexReader().leaves().get(i).reader();
-                    readersToTypeCache[i] = new Tuple<AtomicReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
-                }
-            }
-
-            @Override
-            protected boolean matchDoc(int doc) {
-                if (doc == -1) {
-                    return false;
-                }
-
-                HashedBytesArray parentId = currentTypeCache.parentIdByDoc(doc);
-                if (parentId == null) {
-                    return false;
-                }
-
-                for (Tuple<AtomicReader, IdReaderTypeCache> readerTypeCacheTuple : readersToTypeCache) {
-                    int parentDocId = readerTypeCacheTuple.v2().docById(parentId);
-                    if (parentDocId == -1) {
-                        continue;
-                    }
-
-                    FixedBitSet currentParentDocs = parentDocs.get(readerTypeCacheTuple.v1().getCoreCacheKey());
-                    if (currentParentDocs.get(parentDocId)) {
-                        return true;
-                    }
-                }
-                return false;
-            }
-        }
-
-        static class ParentDocsCollector extends NoopCollector {
-
-            final Map<Object, FixedBitSet> segmentResults = newHashMap();
-            FixedBitSet current;
-
-            public void collect(int doc) throws IOException {
-                current.set(doc);
-            }
-
-            @Override
-            public void setNextReader(AtomicReaderContext context) throws IOException {
-                segmentResults.put(context.reader().getCoreCacheKey(), current = new FixedBitSet(context.reader().maxDoc()));
-            }
-        }
-    }
 
 }

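A behavioral consequence of the parser hunks above: a request that still sends `execution_type` no longer matches a dedicated branch and falls through to the unknown-field case. Sketch assembled from the unchanged lines of the has_child filter parser:

    } else if ("_name".equals(currentFieldName)) {
        filterName = parser.text();
    } else {
        // "execution_type" now lands here and the request is rejected
        throw new QueryParsingException(parseContext.index(), "[has_child] filter does not support [" + currentFieldName + "]");
    }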
@@ -142,129 +142,122 @@ public class ChildSearchBenchmark {
         System.out.println("--> Committed heap size: " + statsResponse.nodes()[0].getJvm().getMem().getHeapCommitted());
         System.out.println("--> Used heap size: " + statsResponse.nodes()[0].getJvm().getMem().getHeapUsed());
 
-        String[] executionTypes = new String[]{"uid"/*, "bitset"*/};// either uid (faster, in general a bit more memory) or bitset (slower, but in general a bit less memory)
-        for (String executionType : executionTypes) {
-            System.out.println("--> Running has_child filter with execution type " + executionType);
-            // run parent child constant query
-            for (int j = 0; j < QUERY_WARMUP; j++) {
-                SearchResponse searchResponse = client.prepareSearch(indexName)
-                        .setQuery(
-                                filteredQuery(
-                                        matchAllQuery(),
-                                        hasChildFilter("child", termQuery("tag", "tag1")).executionType(executionType)
-                                )
-                        )
-                        .execute().actionGet();
-                if (searchResponse.failedShards() > 0) {
-                    System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
-                }
-                if (searchResponse.hits().totalHits() != COUNT) {
-                    System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + COUNT + "]");
-                }
-            }
-
-            totalQueryTime = 0;
-            for (int j = 0; j < QUERY_COUNT; j++) {
-                SearchResponse searchResponse = client.prepareSearch(indexName)
-                        .setQuery(
-                                filteredQuery(
-                                        matchAllQuery(),
-                                        hasChildFilter("child", termQuery("tag", "tag1")).executionType(executionType)
-                                )
-                        )
-                        .execute().actionGet();
-                if (searchResponse.failedShards() > 0) {
-                    System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
-                }
-                if (searchResponse.hits().totalHits() != COUNT) {
-                    System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + COUNT + "]");
-                }
-                totalQueryTime += searchResponse.tookInMillis();
-            }
-            System.out.println("--> has_child[" + executionType + "] filter Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
-
-            System.out.println("--> Running has_child[" + executionType + "] filter with match_all child query");
-            totalQueryTime = 0;
-            for (int j = 1; j <= QUERY_COUNT; j++) {
-                SearchResponse searchResponse = client.prepareSearch(indexName)
-                        .setQuery(
-                                filteredQuery(
-                                        matchAllQuery(),
-                                        hasChildFilter("child", matchAllQuery()).executionType(executionType)
-                                )
-                        )
-                        .execute().actionGet();
-                if (searchResponse.failedShards() > 0) {
-                    System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
-                }
-                long expected = (COUNT / BATCH) * BATCH;
-                if (searchResponse.hits().totalHits() != expected) {
-                    System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + expected + "]");
-                }
-                totalQueryTime += searchResponse.tookInMillis();
-            }
-            System.out.println("--> has_child[" + executionType + "] filter with match_all child query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
-        }
-
-        for (String executionType : executionTypes) {
-            System.out.println("--> Running has_parent filter with " + executionType + " execution type");
-            // run parent child constant query
-            for (int j = 0; j < QUERY_WARMUP; j++) {
-                SearchResponse searchResponse = client.prepareSearch(indexName)
-                        .setQuery(
-                                filteredQuery(
-                                        matchAllQuery(),
-                                        hasParentFilter("parent", termQuery("name", "test1")).executionType(executionType)
-                                )
-                        )
-                        .execute().actionGet();
-                if (searchResponse.failedShards() > 0) {
-                    System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
-                }
-                if (searchResponse.hits().totalHits() != CHILD_COUNT) {
-                    System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + CHILD_COUNT + "]");
-                }
-            }
-
-            totalQueryTime = 0;
-            for (int j = 1; j <= QUERY_COUNT; j++) {
-                SearchResponse searchResponse = client.prepareSearch(indexName)
-                        .setQuery(
-                                filteredQuery(
-                                        matchAllQuery(),
-                                        hasParentFilter("parent", termQuery("name", "test1")).executionType(executionType)
-                                )
-                        )
-                        .execute().actionGet();
-                if (searchResponse.failedShards() > 0) {
-                    System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
-                }
-                if (searchResponse.hits().totalHits() != CHILD_COUNT) {
-                    System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + CHILD_COUNT + "]");
-                }
-                totalQueryTime += searchResponse.tookInMillis();
-            }
-            System.out.println("--> has_parent[" + executionType + "] filter Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
-
-            System.out.println("--> Running has_parent[" + executionType + "] filter with match_all parent query ");
-            totalQueryTime = 0;
-            for (int j = 1; j <= QUERY_COUNT; j++) {
-                SearchResponse searchResponse = client.prepareSearch(indexName)
-                        .setQuery(filteredQuery(
-                                matchAllQuery(),
-                                hasParentFilter("parent", matchAllQuery()).executionType(executionType)
-                        ))
-                        .execute().actionGet();
-                if (searchResponse.failedShards() > 0) {
-                    System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
-                }
-                if (searchResponse.hits().totalHits() != 5000000) {
-                    System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + 5000000 + "]");
-                }
-                totalQueryTime += searchResponse.tookInMillis();
-            }
-            System.out.println("--> has_parent[" + executionType + "] filter with match_all parent query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
-        }
+        // run parent child constant query
+        for (int j = 0; j < QUERY_WARMUP; j++) {
+            SearchResponse searchResponse = client.prepareSearch(indexName)
+                    .setQuery(
+                            filteredQuery(
+                                    matchAllQuery(),
+                                    hasChildFilter("child", termQuery("tag", "tag1"))
+                            )
+                    )
+                    .execute().actionGet();
+            if (searchResponse.failedShards() > 0) {
+                System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
+            }
+            if (searchResponse.hits().totalHits() != COUNT) {
+                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + COUNT + "]");
+            }
+        }
+
+        totalQueryTime = 0;
+        for (int j = 0; j < QUERY_COUNT; j++) {
+            SearchResponse searchResponse = client.prepareSearch(indexName)
+                    .setQuery(
+                            filteredQuery(
+                                    matchAllQuery(),
+                                    hasChildFilter("child", termQuery("tag", "tag1"))
+                            )
+                    )
+                    .execute().actionGet();
+            if (searchResponse.failedShards() > 0) {
+                System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
+            }
+            if (searchResponse.hits().totalHits() != COUNT) {
+                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + COUNT + "]");
+            }
+            totalQueryTime += searchResponse.tookInMillis();
+        }
+        System.out.println("--> has_child filter Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
+
+        System.out.println("--> Running has_child filter with match_all child query");
+        totalQueryTime = 0;
+        for (int j = 1; j <= QUERY_COUNT; j++) {
+            SearchResponse searchResponse = client.prepareSearch(indexName)
+                    .setQuery(
+                            filteredQuery(
+                                    matchAllQuery(),
+                                    hasChildFilter("child", matchAllQuery())
+                            )
+                    )
+                    .execute().actionGet();
+            if (searchResponse.failedShards() > 0) {
+                System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
+            }
+            long expected = (COUNT / BATCH) * BATCH;
+            if (searchResponse.hits().totalHits() != expected) {
+                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + expected + "]");
+            }
+            totalQueryTime += searchResponse.tookInMillis();
+        }
+        System.out.println("--> has_child filter with match_all child query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
+
+        // run parent child constant query
+        for (int j = 0; j < QUERY_WARMUP; j++) {
+            SearchResponse searchResponse = client.prepareSearch(indexName)
+                    .setQuery(
+                            filteredQuery(
+                                    matchAllQuery(),
+                                    hasParentFilter("parent", termQuery("name", "test1"))
+                            )
+                    )
+                    .execute().actionGet();
+            if (searchResponse.failedShards() > 0) {
+                System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
+            }
+            if (searchResponse.hits().totalHits() != CHILD_COUNT) {
+                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + CHILD_COUNT + "]");
+            }
+        }
+
+        totalQueryTime = 0;
+        for (int j = 1; j <= QUERY_COUNT; j++) {
+            SearchResponse searchResponse = client.prepareSearch(indexName)
+                    .setQuery(
+                            filteredQuery(
+                                    matchAllQuery(),
+                                    hasParentFilter("parent", termQuery("name", "test1"))
+                            )
+                    )
+                    .execute().actionGet();
+            if (searchResponse.failedShards() > 0) {
+                System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
+            }
+            if (searchResponse.hits().totalHits() != CHILD_COUNT) {
+                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + CHILD_COUNT + "]");
+            }
+            totalQueryTime += searchResponse.tookInMillis();
+        }
+        System.out.println("--> has_parent filter Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
+
+        System.out.println("--> Running has_parent filter with match_all parent query ");
+        totalQueryTime = 0;
+        for (int j = 1; j <= QUERY_COUNT; j++) {
+            SearchResponse searchResponse = client.prepareSearch(indexName)
+                    .setQuery(filteredQuery(
+                            matchAllQuery(),
+                            hasParentFilter("parent", matchAllQuery())
+                    ))
+                    .execute().actionGet();
+            if (searchResponse.failedShards() > 0) {
+                System.err.println("Search Failures " + Arrays.toString(searchResponse.shardFailures()));
+            }
+            if (searchResponse.hits().totalHits() != 5000000) {
+                System.err.println("--> mismatch on hits [" + j + "], got [" + searchResponse.hits().totalHits() + "], expected [" + 5000000 + "]");
+            }
+            totalQueryTime += searchResponse.tookInMillis();
+        }
+        System.out.println("--> has_parent filter with match_all parent query, Query Avg: " + (totalQueryTime / QUERY_COUNT) + "ms");
         System.out.println("--> Running top_children query");
         // run parent child score query
         for (int j = 0; j < QUERY_WARMUP; j++) {

@@ -1,32 +0,0 @@
-/*
- * Licensed to ElasticSearch and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. ElasticSearch licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.test.integration.search.child;
-
-/**
- * Runs super class tests, but with bitset execution type.
- */
-public class BitsetExecutionChildQuerySearchTests extends SimpleChildQuerySearchTests {
-
-    @Override
-    protected String getExecutionMethod() {
-        return "bitset";
-    }
-
-}

@@ -70,10 +70,6 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         return client("node1");
     }
 
-    protected String getExecutionMethod() {
-        return "uid";
-    }
-
     @Test
     public void multiLevelChild() throws Exception {
         client.admin().indices().prepareDelete().execute().actionGet();
@@ -104,8 +100,8 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
                                 matchAllQuery(),
                                 hasChildFilter(
                                         "child",
-                                        filteredQuery(termQuery("c_field", "c_value1"), hasChildFilter("grandchild", termQuery("gc_field", "gc_value1")).executionType(getExecutionMethod()))
-                                ).executionType(getExecutionMethod())
+                                        filteredQuery(termQuery("c_field", "c_value1"), hasChildFilter("grandchild", termQuery("gc_field", "gc_value1")))
+                                )
                         )
                 )
                 .execute().actionGet();
@@ -243,13 +239,13 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
 
         // HAS CHILD FILTER
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(1l));
         assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")))).execute().actionGet();
         if (searchResponse.failedShards() > 0) {
             logger.warn("Failed shards:");
             for (ShardSearchFailure shardSearchFailure : searchResponse.shardFailures()) {
@@ -260,7 +256,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         assertThat(searchResponse.hits().totalHits(), equalTo(1l));
         assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@@ -268,14 +264,14 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         assertThat(searchResponse.hits().getAt(1).id(), anyOf(equalTo("p2"), equalTo("p1")));
 
         // HAS PARENT FILTER
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value2")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value2")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(2l));
         assertThat(searchResponse.hits().getAt(0).id(), equalTo("c3"));
         assertThat(searchResponse.hits().getAt(1).id(), equalTo("c4"));
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value1")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value1")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@@ -344,7 +340,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
         assertThat(parentToChildren.isEmpty(), equalTo(false));
         for (Map.Entry<String, List<String>> parentToChildrenEntry : parentToChildren.entrySet()) {
             SearchResponse searchResponse = client.prepareSearch("test")
-                    .setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", parentToChildrenEntry.getKey())).executionType(getExecutionMethod())))
+                    .setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", parentToChildrenEntry.getKey()))))
                     .setSize(numChildDocsPerParent)
                     .execute().actionGet();
 
@@ -439,19 +435,19 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
 
         // HAS CHILD FILTER
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(1l));
         assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(1l));
         assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
 
-        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")).executionType(getExecutionMethod()))).execute().actionGet();
+        searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")))).execute().actionGet();
         assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
         assertThat(searchResponse.failedShards(), equalTo(0));
         assertThat(searchResponse.hits().totalHits(), equalTo(2l));
@ -538,19 +534,19 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
||||||
|
|
||||||
// HAS CHILD FILTER
|
// HAS CHILD FILTER
|
||||||
|
|
||||||
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod()))).execute().actionGet();
|
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).execute().actionGet();
|
||||||
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
||||||
assertThat(searchResponse.failedShards(), equalTo(0));
|
assertThat(searchResponse.failedShards(), equalTo(0));
|
||||||
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
||||||
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
|
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
|
||||||
|
|
||||||
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")).executionType(getExecutionMethod()))).execute().actionGet();
|
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "blue")))).execute().actionGet();
|
||||||
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
||||||
assertThat(searchResponse.failedShards(), equalTo(0));
|
assertThat(searchResponse.failedShards(), equalTo(0));
|
||||||
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
||||||
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
|
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p2"));
|
||||||
|
|
||||||
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")).executionType(getExecutionMethod()))).execute().actionGet();
|
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "red")))).execute().actionGet();
|
||||||
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
||||||
assertThat(searchResponse.failedShards(), equalTo(0));
|
assertThat(searchResponse.failedShards(), equalTo(0));
|
||||||
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
|
assertThat(searchResponse.hits().totalHits(), equalTo(2l));
|
||||||
|
@ -648,7 +644,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
||||||
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
|
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
|
||||||
assertThat(searchResponse.hits().getAt(0).sourceAsString(), containsString("\"p_value1\""));
|
assertThat(searchResponse.hits().getAt(0).sourceAsString(), containsString("\"p_value1\""));
|
||||||
|
|
||||||
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod()))).execute().actionGet();
|
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).execute().actionGet();
|
||||||
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
||||||
assertThat(searchResponse.failedShards(), equalTo(0));
|
assertThat(searchResponse.failedShards(), equalTo(0));
|
||||||
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
||||||
|
@ -667,7 +663,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
||||||
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
|
assertThat(searchResponse.hits().getAt(0).id(), equalTo("p1"));
|
||||||
assertThat(searchResponse.hits().getAt(0).sourceAsString(), containsString("\"p_value1_updated\""));
|
assertThat(searchResponse.hits().getAt(0).sourceAsString(), containsString("\"p_value1_updated\""));
|
||||||
|
|
||||||
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")).executionType(getExecutionMethod()))).execute().actionGet();
|
searchResponse = client.prepareSearch("test").setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow")))).execute().actionGet();
|
||||||
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
|
||||||
assertThat(searchResponse.failedShards(), equalTo(0));
|
assertThat(searchResponse.failedShards(), equalTo(0));
|
||||||
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
assertThat(searchResponse.hits().totalHits(), equalTo(1l));
|
||||||
|
@ -882,13 +878,13 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
||||||
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent query hasn't executed properly"), equalTo(true));
|
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent query hasn't executed properly"), equalTo(true));
|
||||||
|
|
||||||
countResponse = client.prepareCount("test")
|
countResponse = client.prepareCount("test")
|
||||||
.setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field1", "2")).executionType(getExecutionMethod())))
|
.setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field1", "2"))))
|
||||||
.execute().actionGet();
|
.execute().actionGet();
|
||||||
assertThat(countResponse.failedShards(), equalTo(1));
|
assertThat(countResponse.failedShards(), equalTo(1));
|
||||||
assertThat(countResponse.shardFailures().get(0).reason().contains("has_child filter hasn't executed properly"), equalTo(true));
|
assertThat(countResponse.shardFailures().get(0).reason().contains("has_child filter hasn't executed properly"), equalTo(true));
|
||||||
|
|
||||||
countResponse = client.prepareCount("test")
|
countResponse = client.prepareCount("test")
|
||||||
.setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field1", "1")).executionType(getExecutionMethod())))
|
.setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field1", "1"))))
|
||||||
.execute().actionGet();
|
.execute().actionGet();
|
||||||
assertThat(countResponse.failedShards(), equalTo(1));
|
assertThat(countResponse.failedShards(), equalTo(1));
|
||||||
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent filter hasn't executed properly"), equalTo(true));
|
assertThat(countResponse.shardFailures().get(0).reason().contains("has_parent filter hasn't executed properly"), equalTo(true));
|
||||||
|
@ -1120,7 +1116,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
||||||
.execute().actionGet();
|
.execute().actionGet();
|
||||||
|
|
||||||
client.prepareSearch("test")
|
client.prepareSearch("test")
|
||||||
.setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).executionType(getExecutionMethod()))
|
.setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")))
|
||||||
.execute().actionGet();
|
.execute().actionGet();
|
||||||
assertThat(response.failedShards(), equalTo(0));
|
assertThat(response.failedShards(), equalTo(0));
|
||||||
assertThat(response.hits().totalHits(), equalTo(0l));
|
assertThat(response.hits().totalHits(), equalTo(0l));
|
||||||
|
@ -1132,7 +1128,7 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
||||||
assertThat(response.hits().totalHits(), equalTo(0l));
|
assertThat(response.hits().totalHits(), equalTo(0l));
|
||||||
|
|
||||||
client.prepareSearch("test")
|
client.prepareSearch("test")
|
||||||
.setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")).executionType(getExecutionMethod()))
|
.setQuery(QueryBuilders.hasParentQuery("child", matchQuery("text", "value")))
|
||||||
.execute().actionGet();
|
.execute().actionGet();
|
||||||
assertThat(response.failedShards(), equalTo(0));
|
assertThat(response.failedShards(), equalTo(0));
|
||||||
assertThat(response.hits().totalHits(), equalTo(0l));
|
assertThat(response.hits().totalHits(), equalTo(0l));
|
||||||
|
|
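Every call site above changes the same way: the .executionType(getExecutionMethod()) link is simply dropped from the builder chain, and nothing else moves. A minimal self-contained sketch of the resulting usage follows, assuming the 0.90-era Java client API and the static imports these tests already rely on; the index, type, and field values are just the ones appearing in the diff, and the two helper method names are invented for illustration.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

import static org.elasticsearch.index.query.FilterBuilders.hasChildFilter;
import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

public class ParentChildFilterSketch {

    // Parents that have at least one child with c_field == "yellow".
    // The filter is built from the child type and the inner query only;
    // there is no executionType(...) step in the chain any more.
    static SearchResponse parentsWithYellowChild(Client client) {
        return client.prepareSearch("test")
                .setQuery(constantScoreQuery(hasChildFilter("child", termQuery("c_field", "yellow"))))
                .execute().actionGet();
    }

    // Children whose parent matches p_field == "p_value1" (value is
    // hypothetical, taken from the source strings asserted in the diff).
    static SearchResponse childrenOfParent(Client client) {
        return client.prepareSearch("test")
                .setQuery(constantScoreQuery(hasParentFilter("parent", termQuery("p_field", "p_value1"))))
                .execute().actionGet();
    }
}

The design consequence, as far as the tests show, is that the child-to-parent lookup strategy is no longer caller-selectable: callers state only the relation type and the inner query, and the implementation choice stays internal.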