Merge pull request #13376 from martijnvg/remove_pre2x_slow_parent_child

Removed pre 2.x parent child implementation

Drops the in-memory parent/child queries and supporting field data that only served indices created before 2.0 (ChildrenQuery, ChildrenConstantScoreQuery, ParentQuery, ParentConstantScoreQuery, the per-type ordinals loading and its circuit-breaker estimator). has_child and has_parent now always go through the Lucene JoinUtil based execution path, and the short_circuit_cutoff option is removed along with the code it tuned.
Martijn van Groningen 2015-09-09 15:20:10 +02:00
commit 73d84e4797
32 changed files with 117 additions and 4862 deletions

View File

@@ -399,7 +399,7 @@ public class MetaDataMappingService extends AbstractComponent {
         // For example in MapperService we can't distinguish between a create index api call
         // and a put mapping api call, so we don't know which type did exist before.
         // Also the order of the mappings may be backwards.
-        if (Version.indexCreated(indexService.getIndexSettings()).onOrAfter(Version.V_2_0_0_beta1) && newMapper.parentFieldMapper().active()) {
+        if (newMapper.parentFieldMapper().active()) {
             IndexMetaData indexMetaData = currentState.metaData().index(index);
             for (ObjectCursor<MappingMetaData> mapping : indexMetaData.mappings().values()) {
                 if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) {

View File

@@ -245,16 +245,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
                 }
                 fieldDataCaches.put(fieldNames.indexName(), cache);
             }
-            // Remove this in 3.0
-            final boolean isOldParentField = ParentFieldMapper.NAME.equals(fieldNames.indexName())
-                    && Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
-            if (isOldParentField) {
-                if (parentIndexFieldData == null) {
-                    parentIndexFieldData = builder.build(index, indexSettings, fieldType, cache, circuitBreakerService, mapperService);
-                }
-                return (IFD) parentIndexFieldData;
-            }
         }
         return (IFD) builder.build(index, indexSettings, fieldType, cache, circuitBreakerService, mapperService);

View File

@@ -1,93 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.search.MultiValueMode;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
*/
public class ParentChildAtomicFieldData extends AbstractAtomicParentChildFieldData {
private final ImmutableOpenMap<String, AtomicOrdinalsFieldData> typeToIds;
private final long memorySizeInBytes;
public ParentChildAtomicFieldData(ImmutableOpenMap<String, AtomicOrdinalsFieldData> typeToIds) {
this.typeToIds = typeToIds;
long size = 0;
for (ObjectCursor<AtomicOrdinalsFieldData> cursor : typeToIds.values()) {
size += cursor.value.ramBytesUsed();
}
this.memorySizeInBytes = size;
}
@Override
public long ramBytesUsed() {
return memorySizeInBytes;
}
@Override
public Collection<Accountable> getChildResources() {
// TODO: should we break down by type?
// the current 'map' does not impl java.util.Map so we cant use Accountables.namedAccountables...
return Collections.emptyList();
}
@Override
public Set<String> types() {
final Set<String> types = new HashSet<>();
for (ObjectCursor<String> cursor : typeToIds.keys()) {
types.add(cursor.value);
}
return types;
}
@Override
public SortedDocValues getOrdinalsValues(String type) {
AtomicOrdinalsFieldData atomicFieldData = typeToIds.get(type);
if (atomicFieldData != null) {
return MultiValueMode.MIN.select(atomicFieldData.getOrdinalsValues());
} else {
return DocValues.emptySorted();
}
}
public AtomicOrdinalsFieldData getAtomicFieldData(String type) {
return typeToIds.get(type);
}
@Override
public void close() {
for (ObjectCursor<AtomicOrdinalsFieldData> cursor : typeToIds.values()) {
cursor.value.close();
}
}
}
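
For context, the class above exposes one ordinals-based field data instance per parent type. A minimal usage sketch (the variable names and the "blog" type are hypothetical):

    // Look up the parent id of a document for one parent type.
    SortedDocValues ords = atomicFieldData.getOrdinalsValues("blog"); // per-type view
    int ord = ords.getOrd(docId);                                     // -1 if the doc has no value
    BytesRef parentId = ord >= 0 ? ords.lookupOrd(ord) : null;        // resolve ordinal to id bytes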

View File

@@ -1,84 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.Uid;
import java.io.IOException;
import java.util.NavigableSet;
/**
* Only emits terms that exist in the parentTypes set.
*
* @elasticsearch.internal
*/
final class ParentChildFilteredTermsEnum extends FilteredTermsEnum {
private final NavigableSet<BytesRef> parentTypes;
private BytesRef seekTerm;
private String type;
private BytesRef id;
ParentChildFilteredTermsEnum(TermsEnum tenum, NavigableSet<BytesRef> parentTypes) {
super(tenum, true);
this.parentTypes = parentTypes;
this.seekTerm = parentTypes.isEmpty() ? null : parentTypes.first();
}
@Override
protected BytesRef nextSeekTerm(BytesRef currentTerm) throws IOException {
BytesRef temp = seekTerm;
seekTerm = null;
return temp;
}
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
if (parentTypes.isEmpty()) {
return AcceptStatus.END;
}
BytesRef[] typeAndId = Uid.splitUidIntoTypeAndId(term);
if (parentTypes.contains(typeAndId[0])) {
type = typeAndId[0].utf8ToString();
id = typeAndId[1];
return AcceptStatus.YES;
} else {
BytesRef nextType = parentTypes.ceiling(typeAndId[0]);
if (nextType == null) {
return AcceptStatus.END;
}
seekTerm = nextType;
return AcceptStatus.NO_AND_SEEK;
}
}
public String type() {
return type;
}
public BytesRef id() {
return id;
}
}
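
The accept logic above hinges on the pre-2.x _uid term layout, which encodes type and id in a single term. A hedged sketch (the literal values are illustrative):

    // _uid terms have the shape "type#id".
    BytesRef term = new BytesRef("blog#1");
    BytesRef[] typeAndId = Uid.splitUidIntoTypeAndId(term);
    // typeAndId[0] == "blog" -> checked against parentTypes
    // typeAndId[1] == "1"    -> exposed via id()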

View File

@@ -67,36 +67,24 @@ import java.util.concurrent.TimeUnit;
  * ParentChildIndexFieldData is responsible for loading the id cache mapping
  * needed for has_child and has_parent queries into memory.
  */
-public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicParentChildFieldData> implements IndexParentChildFieldData, DocumentTypeListener {
+public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicParentChildFieldData> implements IndexParentChildFieldData {

-    private final NavigableSet<String> parentTypes;
+    private final Set<String> parentTypes;
     private final CircuitBreakerService breakerService;
-    // If child type (a type with _parent field) is added or removed, we want to make sure modifications don't happen
-    // while loading.
-    private final Object lock = new Object();

     public ParentChildIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames,
                                      FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
                                      CircuitBreakerService breakerService) {
         super(index, indexSettings, fieldNames, fieldDataType, cache);
         this.breakerService = breakerService;
-        if (Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1)) {
-            parentTypes = new TreeSet<>();
-            for (DocumentMapper documentMapper : mapperService.docMappers(false)) {
-                beforeCreate(documentMapper);
-            }
-            mapperService.addTypeListener(this);
-        } else {
-            ImmutableSortedSet.Builder<String> builder = ImmutableSortedSet.naturalOrder();
+        Set<String> parentTypes = new HashSet<>();
         for (DocumentMapper mapper : mapperService.docMappers(false)) {
             ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
             if (parentFieldMapper.active()) {
-                builder.add(parentFieldMapper.type());
+                parentTypes.add(parentFieldMapper.type());
             }
         }
-            parentTypes = builder.build();
-        }
+        this.parentTypes = parentTypes;
     }

     @Override
@@ -106,7 +94,6 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
     @Override
     public AtomicParentChildFieldData load(LeafReaderContext context) {
-        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)) {
         final LeafReader reader = context.reader();
         return new AbstractAtomicParentChildFieldData() {
@@ -138,126 +125,16 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
             public void close() throws ElasticsearchException {
             }
         };
-        } else {
-            try {
-                return cache.load(context, this);
-            } catch (Throwable e) {
-                if (e instanceof ElasticsearchException) {
-                    throw (ElasticsearchException) e;
-                } else {
-                    throw new ElasticsearchException(e.getMessage(), e);
-                }
-            }
-        }
     }

     @Override
     public AbstractAtomicParentChildFieldData loadDirect(LeafReaderContext context) throws Exception {
-        // Make this method throw an UnsupportedOperationException in 3.0, only
-        // needed for indices created BEFORE 2.0
-        LeafReader reader = context.reader();
-        final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
-                "acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO
-        );
-        final NavigableSet<BytesRef> parentTypes = new TreeSet<>();
-        synchronized (lock) {
-            for (String parentType : this.parentTypes) {
-                parentTypes.add(new BytesRef(parentType));
-            }
-        }
-        boolean success = false;
-        ParentChildAtomicFieldData data = null;
-        ParentChildFilteredTermsEnum termsEnum = new ParentChildFilteredTermsEnum(
-                new ParentChildIntersectTermsEnum(reader, UidFieldMapper.NAME, ParentFieldMapper.NAME),
-                parentTypes
-        );
-        ParentChildEstimator estimator = new ParentChildEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA), termsEnum);
-        TermsEnum estimatedTermsEnum = estimator.beforeLoad(null);
-        ObjectObjectHashMap<String, TypeBuilder> typeBuilders = new ObjectObjectHashMap<>();
-        try {
-            try {
-                PostingsEnum docsEnum = null;
-                for (BytesRef term = estimatedTermsEnum.next(); term != null; term = estimatedTermsEnum.next()) {
-                    // Usually this would be estimatedTermsEnum, but the
-                    // abstract TermsEnum class does not support the .type()
-                    // and .id() methods, so we skip using the wrapped
-                    // TermsEnum and delegate directly to the
-                    // ParentChildFilteredTermsEnum that was originally wrapped
-                    String type = termsEnum.type();
-                    TypeBuilder typeBuilder = typeBuilders.get(type);
-                    if (typeBuilder == null) {
-                        typeBuilders.put(type, typeBuilder = new TypeBuilder(acceptableTransientOverheadRatio, reader));
-                    }
-                    BytesRef id = termsEnum.id();
-                    final long termOrd = typeBuilder.builder.nextOrdinal();
-                    assert termOrd == typeBuilder.termOrdToBytesOffset.size();
-                    typeBuilder.termOrdToBytesOffset.add(typeBuilder.bytes.copyUsingLengthPrefix(id));
-                    docsEnum = estimatedTermsEnum.postings(docsEnum, PostingsEnum.NONE);
-                    for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
-                        typeBuilder.builder.addDoc(docId);
-                    }
-                }
-                ImmutableOpenMap.Builder<String, AtomicOrdinalsFieldData> typeToAtomicFieldData = ImmutableOpenMap.builder(typeBuilders.size());
-                for (ObjectObjectCursor<String, TypeBuilder> cursor : typeBuilders) {
-                    PagedBytes.Reader bytesReader = cursor.value.bytes.freeze(true);
-                    final Ordinals ordinals = cursor.value.builder.build(fieldDataType.getSettings());
-                    typeToAtomicFieldData.put(
-                            cursor.key,
-                            new PagedBytesAtomicFieldData(bytesReader, cursor.value.termOrdToBytesOffset.build(), ordinals)
-                    );
-                }
-                data = new ParentChildAtomicFieldData(typeToAtomicFieldData.build());
-            } finally {
-                for (ObjectObjectCursor<String, TypeBuilder> cursor : typeBuilders) {
-                    cursor.value.builder.close();
-                }
-            }
-            success = true;
-            return data;
-        } finally {
-            if (success) {
-                estimator.afterLoad(estimatedTermsEnum, data.ramBytesUsed());
-            } else {
-                estimator.afterLoad(estimatedTermsEnum, 0);
-            }
-        }
-    }
-
-    @Override
-    public void beforeCreate(DocumentMapper mapper) {
-        // Remove in 3.0
-        synchronized (lock) {
-            ParentFieldMapper parentFieldMapper = mapper.parentFieldMapper();
-            if (parentFieldMapper.active()) {
-                // A _parent field can never be added to an existing mapping, so a _parent field either exists on
-                // a new created or doesn't exists. This is why we can update the known parent types via DocumentTypeListener
-                if (parentTypes.add(parentFieldMapper.type())) {
-                    clear();
-                }
-            }
-        }
+        throw new UnsupportedOperationException();
     }

     @Override
     protected AtomicParentChildFieldData empty(int maxDoc) {
-        return new ParentChildAtomicFieldData(ImmutableOpenMap.<String, AtomicOrdinalsFieldData>of());
-    }
-
-    class TypeBuilder {
-
-        final PagedBytes bytes;
-        final PackedLongValues.Builder termOrdToBytesOffset;
-        final OrdinalsBuilder builder;
-
-        TypeBuilder(float acceptableTransientOverheadRatio, LeafReader reader) throws IOException {
-            bytes = new PagedBytes(15);
-            termOrdToBytesOffset = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
-            builder = new OrdinalsBuilder(-1, reader.maxDoc(), acceptableTransientOverheadRatio);
-        }
-    }
+        return AbstractAtomicParentChildFieldData.empty();
     }

     public static class Builder implements IndexFieldData.Builder {
@@ -271,55 +148,6 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
         }
     }

-    /**
-     * Estimator that wraps parent/child id field data by wrapping the data
-     * in a RamAccountingTermsEnum.
-     */
-    public class ParentChildEstimator implements PerValueEstimator {
-
-        private final CircuitBreaker breaker;
-        private final TermsEnum filteredEnum;
-
-        // The TermsEnum is passed in here instead of being generated in the
-        // beforeLoad() function since it's filtered inside the previous
-        // TermsEnum wrappers
-        public ParentChildEstimator(CircuitBreaker breaker, TermsEnum filteredEnum) {
-            this.breaker = breaker;
-            this.filteredEnum = filteredEnum;
-        }
-
-        /**
-         * General overhead for ids is 2 times the length of the ID
-         */
-        @Override
-        public long bytesPerValue(BytesRef term) {
-            if (term == null) {
-                return 0;
-            }
-            return 2 * term.length;
-        }
-
-        /**
-         * Wraps the already filtered {@link TermsEnum} in a
-         * {@link RamAccountingTermsEnum} and returns it
-         */
-        @Override
-        public TermsEnum beforeLoad(Terms terms) throws IOException {
-            return new RamAccountingTermsEnum(filteredEnum, breaker, this, "parent/child id cache");
-        }
-
-        /**
-         * Adjusts the breaker based on the difference between the actual usage
-         * and the aggregated estimations.
-         */
-        @Override
-        public void afterLoad(TermsEnum termsEnum, long actualUsed) {
-            assert termsEnum instanceof RamAccountingTermsEnum;
-            long estimatedBytes = ((RamAccountingTermsEnum) termsEnum).getTotalBytes();
-            breaker.addWithoutBreaking(-(estimatedBytes - actualUsed));
-        }
-    }
-
     @Override
     public IndexParentChildFieldData loadGlobal(IndexReader indexReader) {
         if (indexReader.leaves().size() <= 1) {
@@ -358,14 +186,6 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
     @Override
     public IndexParentChildFieldData localGlobalDirect(IndexReader indexReader) throws Exception {
         final long startTime = System.nanoTime();
-        final Set<String> parentTypes;
-        if (Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1)) {
-            synchronized (lock) {
-                parentTypes = ImmutableSet.copyOf(this.parentTypes);
-            }
-        } else {
-            parentTypes = this.parentTypes;
-        }

         long ramBytesUsed = 0;
         final Map<String, OrdinalMapAndAtomicFieldData> perType = new HashMap<>();

View File

@@ -1,335 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Intersects the terms and unions the doc ids for terms enum of multiple fields.
*
* @elasticsearch.internal
*/
final class ParentChildIntersectTermsEnum extends TermsEnum {
private final List<TermsEnumState> states;
private final IntArrayList stateSlots;
private BytesRef current;
ParentChildIntersectTermsEnum(LeafReader atomicReader, String... fields) throws IOException {
List<TermsEnum> fieldEnums = new ArrayList<>();
for (String field : fields) {
Terms terms = atomicReader.terms(field);
if (terms != null) {
fieldEnums.add(terms.iterator());
}
}
states = new ArrayList<>(fieldEnums.size());
for (TermsEnum tEnum : fieldEnums) {
states.add(new TermsEnumState(tEnum));
}
stateSlots = new IntArrayList(states.size());
}
@Override
public BytesRef term() throws IOException {
return current;
}
@Override
public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException {
int size = stateSlots.size();
assert size > 0;
if (size == 1) {
// Can't use 'reuse' since we don't know to which previous TermsEnum it belonged to.
return states.get(stateSlots.get(0)).termsEnum.postings(null, flags);
} else {
List<PostingsEnum> docsEnums = new ArrayList<>(stateSlots.size());
for (int i = 0; i < stateSlots.size(); i++) {
docsEnums.add(states.get(stateSlots.get(i)).termsEnum.postings(null, flags));
}
return new CompoundDocsEnum(docsEnums);
}
}
@Override
public BytesRef next() throws IOException {
if (states.isEmpty()) {
return null;
}
if (current == null) {
// unpositioned
for (TermsEnumState state : states) {
state.initialize();
}
} else {
int removed = 0;
for (int i = 0; i < stateSlots.size(); i++) {
int stateSlot = stateSlots.get(i);
if (states.get(stateSlot - removed).next() == null) {
states.remove(stateSlot - removed);
removed++;
}
}
if (states.isEmpty()) {
return null;
}
stateSlots.clear();
}
BytesRef lowestTerm = states.get(0).term;
stateSlots.add(0);
for (int i = 1; i < states.size(); i++) {
TermsEnumState state = states.get(i);
int cmp = lowestTerm.compareTo(state.term);
if (cmp > 0) {
lowestTerm = state.term;
stateSlots.clear();
stateSlots.add(i);
} else if (cmp == 0) {
stateSlots.add(i);
}
}
return current = lowestTerm;
}
@Override
public SeekStatus seekCeil(BytesRef text) throws IOException {
if (states.isEmpty()) {
return SeekStatus.END;
}
boolean found = false;
if (current == null) {
// unpositioned
Iterator<TermsEnumState> iterator = states.iterator();
while (iterator.hasNext()) {
SeekStatus seekStatus = iterator.next().seekCeil(text);
if (seekStatus == SeekStatus.END) {
iterator.remove();
} else if (seekStatus == SeekStatus.FOUND) {
found = true;
}
}
} else {
int removed = 0;
for (int i = 0; i < stateSlots.size(); i++) {
int stateSlot = stateSlots.get(i);
SeekStatus seekStatus = states.get(stateSlot - removed).seekCeil(text);
if (seekStatus == SeekStatus.END) {
states.remove(stateSlot - removed);
removed++;
} else if (seekStatus == SeekStatus.FOUND) {
found = true;
}
}
}
if (states.isEmpty()) {
return SeekStatus.END;
}
stateSlots.clear();
if (found) {
for (int i = 0; i < states.size(); i++) {
if (states.get(i).term.equals(text)) {
stateSlots.add(i);
}
}
current = text;
return SeekStatus.FOUND;
} else {
BytesRef lowestTerm = states.get(0).term;
stateSlots.add(0);
for (int i = 1; i < states.size(); i++) {
TermsEnumState state = states.get(i);
int cmp = lowestTerm.compareTo(state.term);
if (cmp > 0) {
lowestTerm = state.term;
stateSlots.clear();
stateSlots.add(i);
} else if (cmp == 0) {
stateSlots.add(i);
}
}
current = lowestTerm;
return SeekStatus.NOT_FOUND;
}
}
class TermsEnumState {
final TermsEnum termsEnum;
BytesRef term;
SeekStatus lastSeekStatus;
TermsEnumState(TermsEnum termsEnum) {
this.termsEnum = termsEnum;
}
void initialize() throws IOException {
term = termsEnum.next();
}
BytesRef next() throws IOException {
return term = termsEnum.next();
}
SeekStatus seekCeil(BytesRef text) throws IOException {
lastSeekStatus = termsEnum.seekCeil(text);
if (lastSeekStatus != SeekStatus.END) {
term = termsEnum.term();
}
return lastSeekStatus;
}
}
class CompoundDocsEnum extends PostingsEnum {
final List<State> states;
int current = -1;
CompoundDocsEnum(List<PostingsEnum> docsEnums) {
this.states = new ArrayList<>(docsEnums.size());
for (PostingsEnum docsEnum : docsEnums) {
states.add(new State(docsEnum));
}
}
@Override
public int freq() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int docID() {
return current;
}
@Override
public int nextDoc() throws IOException {
if (states.isEmpty()) {
return current = NO_MORE_DOCS;
}
if (current == -1) {
for (State state : states) {
state.initialize();
}
}
int lowestIndex = 0;
int lowestDocId = states.get(0).current;
for (int i = 1; i < states.size(); i++) {
State state = states.get(i);
if (lowestDocId > state.current) {
lowestDocId = state.current;
lowestIndex = i;
}
}
if (states.get(lowestIndex).next() == DocIdSetIterator.NO_MORE_DOCS) {
states.remove(lowestIndex);
}
return current = lowestDocId;
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
@Override
public int endOffset() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public BytesRef getPayload() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int nextPosition() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int startOffset() throws IOException {
throw new UnsupportedOperationException();
}
class State {
final PostingsEnum docsEnum;
int current = -1;
State(PostingsEnum docsEnum) {
this.docsEnum = docsEnum;
}
void initialize() throws IOException {
current = docsEnum.nextDoc();
}
int next() throws IOException {
return current = docsEnum.nextDoc();
}
}
}
@Override
public long ord() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void seekExact(long ord) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int docFreq() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long totalTermFreq() throws IOException {
throw new UnsupportedOperationException();
}
}
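
This enum was consumed by the old field data loading code (the loadDirect body removed earlier in this commit); a condensed usage sketch, assuming a LeafReader named reader:

    // Iterate ids that occur in either the _uid or the _parent field of a segment.
    TermsEnum ids = new ParentChildIntersectTermsEnum(reader, UidFieldMapper.NAME, ParentFieldMapper.NAME);
    for (BytesRef term = ids.next(); term != null; term = ids.next()) {
        // postings(...) unions the doc ids for the current term across both fields
        PostingsEnum docs = ids.postings(null, PostingsEnum.NONE);
    }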

View File

@@ -263,7 +263,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         if (mapper.type().contains(",")) {
             throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it");
         }
-        if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1) && mapper.type().equals(mapper.parentFieldMapper().type())) {
+        if (mapper.type().equals(mapper.parentFieldMapper().type())) {
             throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
         }
         if (typeNameStartsWithIllegalDot(mapper)) {
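
With the version guard gone, the self-reference check applies to every index. A hypothetical mapping that now always fails:

    // "mappings": { "blog": { "_parent": { "type": "blog" } } }
    // -> IllegalArgumentException: The [_parent.type] option can't point to the same type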

View File

@@ -37,8 +37,6 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery

     private Integer maxChildren;

-    private Integer shortCircuitCutoff;
-
     private String queryName;

     private QueryInnerHitBuilder innerHit = null;

@@ -82,15 +80,6 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery
         return this;
     }

-    /**
-     * Configures at what cut off point only to evaluate parent documents that contain the matching parent id terms
-     * instead of evaluating all parent docs.
-     */
-    public HasChildQueryBuilder setShortCircuitCutoff(int shortCircuitCutoff) {
-        this.shortCircuitCutoff = shortCircuitCutoff;
-        return this;
-    }
-
     /**
      * Sets the query name for the filter that can be used when searching for matched_filters per hit.
      */
@@ -125,9 +114,6 @@ public class HasChildQueryBuilder extends QueryBuilder implements BoostableQuery
         if (maxChildren != null) {
             builder.field("max_children", maxChildren);
         }
-        if (shortCircuitCutoff != null) {
-            builder.field("short_circuit_cutoff", shortCircuitCutoff);
-        }
         if (queryName != null) {
             builder.field("_name", queryName);
         }
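
A hedged sketch of the builder-side effect of this change (the "comment" type and the matchAllQuery() static import are illustrative):

    HasChildQueryBuilder query = new HasChildQueryBuilder("comment", matchAllQuery());
    // query.setShortCircuitCutoff(8192); // removed by this commit; min/max children and score type remain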

View File

@@ -21,14 +21,10 @@ package org.elasticsearch.index.query;

 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.MultiDocValues;
-import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
@@ -40,9 +36,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
 import org.elasticsearch.index.query.support.XContentStructure;
-import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
-import org.elasticsearch.index.search.child.ChildrenQuery;
-import org.elasticsearch.index.search.child.ScoreType;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
 import org.elasticsearch.search.internal.SearchContext;
@@ -79,7 +72,6 @@ public class HasChildQueryParser implements QueryParser {
         ScoreType scoreType = ScoreType.NONE;
         int minChildren = 0;
         int maxChildren = 0;
-        int shortCircuitParentDocSet = 8192;
         String queryName = null;
         InnerHitsSubSearchContext innerHits = null;
@@ -117,8 +109,6 @@ public class HasChildQueryParser implements QueryParser {
                     minChildren = parser.intValue(true);
                 } else if ("max_children".equals(currentFieldName) || "maxChildren".equals(currentFieldName)) {
                     maxChildren = parser.intValue(true);
-                } else if ("short_circuit_cutoff".equals(currentFieldName)) {
-                    shortCircuitParentDocSet = parser.intValue();
                 } else if ("_name".equals(currentFieldName)) {
                     queryName = parser.text();
                 } else {
@@ -167,29 +157,12 @@ public class HasChildQueryParser implements QueryParser {
             throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
         }

-        BitSetProducer nonNestedDocsFilter = null;
-        if (parentDocMapper.hasNestedObjects()) {
-            nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
-        }
-
         // wrap the query with type query
         innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());

         final Query query;
         final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
-        if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
         query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
-        } else {
-            // TODO: use the query API
-            Filter parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
-            if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
-                query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
-                        maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
-            } else {
-                query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter,
-                        shortCircuitParentDocSet, nonNestedDocsFilter);
-            }
-        }
         if (queryName != null) {
             parseContext.addNamedQuery(queryName, query);
         }
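
On the wire this drops short_circuit_cutoff from the has_child DSL; a hypothetical request fragment that used to be accepted:

    // "has_child": { "type": "comment", "short_circuit_cutoff": 8192, "query": { ... } }
    // After this commit the field is unknown to the parser and presumably falls through to
    // the final else branch, failing the request.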

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.index.query;

 import org.apache.lucene.search.*;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
@@ -30,9 +29,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
 import org.elasticsearch.index.query.support.XContentStructure;
-import org.elasticsearch.index.search.child.ParentConstantScoreQuery;
-import org.elasticsearch.index.search.child.ParentQuery;
-import org.elasticsearch.index.search.child.ScoreType;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
@@ -173,11 +169,11 @@ public class HasParentQueryParser implements QueryParser {
             throw new QueryParsingException(parseContext, "[has_parent] no _parent field configured");
         }

-        Query parentFilter = null;
+        Query parentTypeQuery = null;
         if (parentTypes.size() == 1) {
             DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.iterator().next());
             if (documentMapper != null) {
-                parentFilter = documentMapper.typeFilter();
+                parentTypeQuery = documentMapper.typeFilter();
             }
         } else {
             BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
@@ -187,26 +183,18 @@ public class HasParentQueryParser implements QueryParser {
                     parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
                 }
             }
-            parentFilter = parentsFilter.build();
+            parentTypeQuery = parentsFilter.build();
         }

-        if (parentFilter == null) {
+        if (parentTypeQuery == null) {
             return null;
         }

         // wrap the query with type query
         innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
-        Filter childrenFilter = new QueryWrapperFilter(Queries.not(parentFilter));
+        Query childrenFilter = Queries.not(parentTypeQuery);
-        if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
         ScoreType scoreMode = score ? ScoreType.MAX : ScoreType.NONE;
         return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE);
-        } else {
-            if (score) {
-                return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
-            } else {
-                return new ParentConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
-            }
-        }
     }
 }
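
joinUtilHelper itself is not part of this diff; presumably it wraps Lucene's global-ordinal join, roughly as below (how the OrdinalMap is pulled out of the parent/child field data, and the ScoreType to ScoreMode translation, are assumptions):

    // Lucene 5.x join over a MultiDocValues.OrdinalMap shared by parent and child segments.
    Query join = JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, searcher,
            scoreMode, ordinalMap, minChildren, maxChildren);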

View File

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.index.search.child;
+package org.elasticsearch.index.query;

 /**

View File

@@ -1,304 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.XFilteredDocIdSetIterator;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
*
*/
// TODO: Remove me and move the logic to ChildrenQuery when needsScore=false
public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
private final IndexParentChildFieldData parentChildIndexFieldData;
private final Query childQuery;
private final String parentType;
private final String childType;
private final Filter parentFilter;
private final int shortCircuitParentDocSet;
private final BitSetProducer nonNestedDocsFilter;
public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
this.parentChildIndexFieldData = parentChildIndexFieldData;
this.parentFilter = parentFilter;
this.parentType = parentType;
this.childType = childType;
this.childQuery = childQuery;
this.shortCircuitParentDocSet = shortCircuitParentDocSet;
this.nonNestedDocsFilter = nonNestedDocsFilter;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
final Query childRewritten = childQuery.rewrite(reader);
if (childRewritten != childQuery) {
ChildrenConstantScoreQuery rewritten = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childRewritten, parentType, childType, parentFilter, shortCircuitParentDocSet, nonNestedDocsFilter);
rewritten.setBoost(getBoost());
return rewritten;
}
return super.rewrite(reader);
}
@Override
public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
SearchContext sc = SearchContext.current();
IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader());
final long valueCount;
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
if (globalIfd == null || leaves.isEmpty()) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
} else {
AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
valueCount = globalValues.getValueCount();
}
if (valueCount == 0) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
searcher.search(childQuery, collector);
final long remaining = collector.foundParents();
if (remaining == 0) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
Filter shortCircuitFilter = null;
if (remaining <= shortCircuitParentDocSet) {
shortCircuitFilter = ParentIdsFilter.createShortCircuitFilter(
nonNestedDocsFilter, sc, parentType, collector.values, collector.parentOrds, remaining
);
}
return new ParentWeight(this, parentFilter, globalIfd, shortCircuitFilter, collector, remaining);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (super.equals(obj) == false) {
return false;
}
ChildrenConstantScoreQuery that = (ChildrenConstantScoreQuery) obj;
if (!childQuery.equals(that.childQuery)) {
return false;
}
if (!childType.equals(that.childType)) {
return false;
}
if (shortCircuitParentDocSet != that.shortCircuitParentDocSet) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + childQuery.hashCode();
result = 31 * result + childType.hashCode();
result = 31 * result + shortCircuitParentDocSet;
return result;
}
@Override
public String toString(String field) {
return "child_filter[" + childType + "/" + parentType + "](" + childQuery + ')';
}
private final class ParentWeight extends Weight {
private final Filter parentFilter;
private final Filter shortCircuitFilter;
private final ParentOrdCollector collector;
private final IndexParentChildFieldData globalIfd;
private long remaining;
private float queryNorm;
private float queryWeight;
public ParentWeight(Query query, Filter parentFilter, IndexParentChildFieldData globalIfd, Filter shortCircuitFilter, ParentOrdCollector collector, long remaining) {
super(query);
this.parentFilter = parentFilter;
this.globalIfd = globalIfd;
this.shortCircuitFilter = shortCircuitFilter;
this.collector = collector;
this.remaining = remaining;
}
@Override
public void extractTerms(Set<Term> terms) {
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return Explanation.match(getBoost(), "not implemented yet...");
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
if (remaining == 0) {
return null;
}
if (shortCircuitFilter != null) {
DocIdSet docIdSet = shortCircuitFilter.getDocIdSet(context, null);
if (!Lucene.isEmpty(docIdSet)) {
DocIdSetIterator iterator = docIdSet.iterator();
if (iterator != null) {
return ConstantScorer.create(iterator, this, queryWeight);
}
}
return null;
}
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, null);
if (!Lucene.isEmpty(parentDocIdSet)) {
// We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
// count down (short circuit) logic will then work as expected.
parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());
DocIdSetIterator innerIterator = parentDocIdSet.iterator();
if (innerIterator != null) {
LongBitSet parentOrds = collector.parentOrds;
SortedDocValues globalValues = globalIfd.load(context).getOrdinalsValues(parentType);
if (globalValues != null) {
DocIdSetIterator parentIdIterator = new ParentOrdIterator(innerIterator, parentOrds, globalValues, this);
return ConstantScorer.create(parentIdIterator, this, queryWeight);
}
}
}
return null;
}
}
private final static class ParentOrdCollector extends NoopCollector {
private final LongBitSet parentOrds;
private final IndexParentChildFieldData indexFieldData;
private final String parentType;
private SortedDocValues values;
private ParentOrdCollector(IndexParentChildFieldData indexFieldData, long maxOrd, String parentType) {
// TODO: look into reusing LongBitSet#bits array
this.parentOrds = new LongBitSet(maxOrd + 1);
this.indexFieldData = indexFieldData;
this.parentType = parentType;
}
@Override
public void collect(int doc) throws IOException {
if (values != null) {
int globalOrdinal = values.getOrd(doc);
// TODO: oversize the long bitset and remove the branch
if (globalOrdinal >= 0) {
parentOrds.set(globalOrdinal);
}
}
}
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
values = indexFieldData.load(context).getOrdinalsValues(parentType);
}
long foundParents() {
return parentOrds.cardinality();
}
}
private final static class ParentOrdIterator extends XFilteredDocIdSetIterator {
private final LongBitSet parentOrds;
private final SortedDocValues ordinals;
private final ParentWeight parentWeight;
private ParentOrdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, SortedDocValues ordinals, ParentWeight parentWeight) {
super(innerIterator);
this.parentOrds = parentOrds;
this.ordinals = ordinals;
this.parentWeight = parentWeight;
}
@Override
protected boolean match(int doc) {
if (parentWeight.remaining == 0) {
throw new CollectionTerminatedException();
}
long parentOrd = ordinals.getOrd(doc);
if (parentOrd >= 0) {
boolean match = parentOrds.get(parentOrd);
if (match) {
parentWeight.remaining--;
}
return match;
}
return false;
}
}
}
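
Stripped of the short-circuit and live-docs details, the removed query implements a two-phase join over global ordinals; a minimal sketch of the core idea using the same Lucene types:

    // Phase 1: mark the global ordinal of every parent referenced by a matching child doc.
    LongBitSet parentOrds = new LongBitSet(maxOrd + 1);
    int childOrd = globalValues.getOrd(childDoc);
    if (childOrd >= 0) {
        parentOrds.set(childOrd);
    }
    // Phase 2: a parent doc matches when its own ordinal was marked in phase 1.
    int parentOrd = globalValues.getOrd(parentDoc);
    boolean matches = parentOrd >= 0 && parentOrds.get(parentOrd);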

View File

@@ -1,724 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.XFilteredDocIdSetIterator;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.Set;
/**
* A query implementation that executes the wrapped child query and connects all the matching child docs to the related
* parent documents using {@link ParentChildIndexFieldData}.
* <p/>
* This query is executed in two rounds. The first round resolves all the matching child documents and groups these
* documents by parent uid value. Also the child scores are aggregated per parent uid value. During the second round
* all parent documents having the same uid value that is collected in the first phase are emitted as hit including
* a score based on the aggregated child scores and score type.
*/
public final class ChildrenQuery extends IndexCacheableQuery {
protected final ParentChildIndexFieldData ifd;
protected final String parentType;
protected final String childType;
protected final Filter parentFilter;
protected final ScoreType scoreType;
protected Query childQuery;
protected final int minChildren;
protected final int maxChildren;
protected final int shortCircuitParentDocSet;
protected final BitSetProducer nonNestedDocsFilter;
public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
this.ifd = ifd;
this.parentType = parentType;
this.childType = childType;
this.parentFilter = parentFilter;
this.childQuery = childQuery;
this.scoreType = scoreType;
this.shortCircuitParentDocSet = shortCircuitParentDocSet;
this.nonNestedDocsFilter = nonNestedDocsFilter;
assert maxChildren == 0 || minChildren <= maxChildren;
this.minChildren = minChildren > 1 ? minChildren : 0;
this.maxChildren = maxChildren;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
final Query childRewritten = childQuery.rewrite(reader);
if (childRewritten != childQuery) {
Query rewritten = new ChildrenQuery(ifd, parentType, childType, parentFilter, childRewritten, scoreType, minChildren, maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
rewritten.setBoost(getBoost());
return rewritten;
}
return super.rewrite(reader);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (super.equals(obj) == false) {
return false;
}
ChildrenQuery that = (ChildrenQuery) obj;
if (!childQuery.equals(that.childQuery)) {
return false;
}
if (!childType.equals(that.childType)) {
return false;
}
if (minChildren != that.minChildren) {
return false;
}
if (maxChildren != that.maxChildren) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + childQuery.hashCode();
result = 31 * result + childType.hashCode();
result = 31 * result + minChildren;
result = 31 * result + maxChildren;
return result;
}
@Override
public String toString(String field) {
int max = maxChildren == 0 ? Integer.MAX_VALUE : maxChildren;
return "ChildrenQuery[min(" + Integer.toString(minChildren) + ") max(" + Integer.toString(max) + ")of " + childType + "/"
+ parentType + "](" + childQuery.toString(field) + ')' + ToStringUtils.boost(getBoost());
}
@Override
public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
SearchContext sc = SearchContext.current();
IndexParentChildFieldData globalIfd = ifd.loadGlobal(searcher.getIndexReader());
if (globalIfd == null) {
// No docs of the specified type exist on this shard
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
boolean abort = true;
long numFoundParents;
ParentCollector collector = null;
try {
if (minChildren == 0 && maxChildren == 0 && scoreType != ScoreType.NONE) {
switch (scoreType) {
case MIN:
collector = new MinCollector(globalIfd, sc, parentType);
break;
case MAX:
collector = new MaxCollector(globalIfd, sc, parentType);
break;
case SUM:
collector = new SumCollector(globalIfd, sc, parentType);
break;
}
}
if (collector == null) {
switch (scoreType) {
case MIN:
collector = new MinCountCollector(globalIfd, sc, parentType);
break;
case MAX:
collector = new MaxCountCollector(globalIfd, sc, parentType);
break;
case SUM:
case AVG:
collector = new SumCountAndAvgCollector(globalIfd, sc, parentType);
break;
case NONE:
collector = new CountCollector(globalIfd, sc, parentType);
break;
default:
throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
}
}
searcher.search(childQuery, collector);
numFoundParents = collector.foundParents();
if (numFoundParents == 0) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
abort = false;
} finally {
if (abort) {
Releasables.close(collector);
}
}
sc.addReleasable(collector, Lifetime.COLLECTION);
final Filter parentFilter;
if (numFoundParents <= shortCircuitParentDocSet) {
parentFilter = ParentIdsFilter.createShortCircuitFilter(nonNestedDocsFilter, sc, parentType, collector.values,
collector.parentIdxs, numFoundParents);
} else {
parentFilter = this.parentFilter;
}
return new ParentWeight(this, childQuery.createWeight(searcher, needsScores), parentFilter, numFoundParents, collector, minChildren,
maxChildren);
}
protected class ParentWeight extends Weight {
protected final Weight childWeight;
protected final Filter parentFilter;
protected final ParentCollector collector;
protected final int minChildren;
protected final int maxChildren;
protected long remaining;
protected float queryNorm;
protected float queryWeight;
protected ParentWeight(Query query, Weight childWeight, Filter parentFilter, long remaining, ParentCollector collector, int minChildren, int maxChildren) {
super(query);
this.childWeight = childWeight;
this.parentFilter = parentFilter;
this.remaining = remaining;
this.collector = collector;
this.minChildren = minChildren;
this.maxChildren = maxChildren;
}
@Override
public void extractTerms(Set<Term> terms) {
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return Explanation.match(getBoost(), "not implemented yet...");
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
if (scoreType == ScoreType.NONE) {
return queryWeight * queryWeight;
}
float sum = childWeight.getValueForNormalization();
sum *= queryWeight * queryWeight;
return sum;
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
DocIdSet parentsSet = parentFilter.getDocIdSet(context, null);
if (Lucene.isEmpty(parentsSet) || remaining == 0) {
return null;
}
// We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
// count down (short circuit) logic will then work as expected.
DocIdSetIterator parents = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();
if (parents != null) {
SortedDocValues bytesValues = collector.globalIfd.load(context).getOrdinalsValues(parentType);
if (bytesValues == null) {
return null;
}
if (minChildren > 0 || maxChildren != 0 || scoreType == ScoreType.NONE) {
switch (scoreType) {
case NONE:
DocIdSetIterator parentIdIterator = new CountParentOrdIterator(this, parents, collector, bytesValues,
minChildren, maxChildren);
return ConstantScorer.create(parentIdIterator, this, queryWeight);
case AVG:
return new AvgParentCountScorer(this, parents, collector, bytesValues, minChildren, maxChildren);
default:
return new ParentCountScorer(this, parents, collector, bytesValues, minChildren, maxChildren);
}
}
switch (scoreType) {
case AVG:
return new AvgParentScorer(this, parents, collector, bytesValues);
default:
return new ParentScorer(this, parents, collector, bytesValues);
}
}
return null;
}
}
protected abstract static class ParentCollector extends NoopCollector implements Releasable {
protected final IndexParentChildFieldData globalIfd;
protected final LongHash parentIdxs;
protected final BigArrays bigArrays;
protected final SearchContext searchContext;
protected final String parentType;
protected SortedDocValues values;
protected Scorer scorer;
protected ParentCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
this.globalIfd = globalIfd;
this.searchContext = searchContext;
this.bigArrays = searchContext.bigArrays();
this.parentIdxs = new LongHash(512, bigArrays);
this.parentType = parentType;
}
@Override
public final void collect(int doc) throws IOException {
if (values != null) {
final long globalOrdinal = values.getOrd(doc);
if (globalOrdinal >= 0) {
long parentIdx = parentIdxs.add(globalOrdinal);
if (parentIdx >= 0) {
newParent(parentIdx);
} else {
parentIdx = -1 - parentIdx;
existingParent(parentIdx);
}
}
}
}
protected void newParent(long parentIdx) throws IOException {
}
protected void existingParent(long parentIdx) throws IOException {
}
public long foundParents() {
return parentIdxs.size();
}
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
values = globalIfd.load(context).getOrdinalsValues(parentType);
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
@Override
public void close() {
Releasables.close(parentIdxs);
}
}
protected abstract static class ParentScoreCollector extends ParentCollector implements Releasable {
protected FloatArray scores;
protected ParentScoreCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
this.scores = this.bigArrays.newFloatArray(512, false);
}
@Override
public boolean needsScores() {
return true;
}
@Override
protected void newParent(long parentIdx) throws IOException {
scores = bigArrays.grow(scores, parentIdx + 1);
scores.set(parentIdx, scorer.score());
}
@Override
public void close() {
Releasables.close(parentIdxs, scores);
}
}
protected abstract static class ParentScoreCountCollector extends ParentScoreCollector implements Releasable {
protected IntArray occurrences;
protected ParentScoreCountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
this.occurrences = bigArrays.newIntArray(512, false);
}
@Override
protected void newParent(long parentIdx) throws IOException {
scores = bigArrays.grow(scores, parentIdx + 1);
scores.set(parentIdx, scorer.score());
occurrences = bigArrays.grow(occurrences, parentIdx + 1);
occurrences.set(parentIdx, 1);
}
@Override
public void close() {
Releasables.close(parentIdxs, scores, occurrences);
}
}
private final static class CountCollector extends ParentCollector implements Releasable {
protected IntArray occurrences;
protected CountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
this.occurrences = bigArrays.newIntArray(512, false);
}
@Override
protected void newParent(long parentIdx) throws IOException {
occurrences = bigArrays.grow(occurrences, parentIdx + 1);
occurrences.set(parentIdx, 1);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
occurrences.increment(parentIdx, 1);
}
@Override
public void close() {
Releasables.close(parentIdxs, occurrences);
}
}
private final static class SumCollector extends ParentScoreCollector {
private SumCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
scores.increment(parentIdx, scorer.score());
}
}
private final static class MaxCollector extends ParentScoreCollector {
private MaxCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
float currentScore = scorer.score();
if (currentScore > scores.get(parentIdx)) {
scores.set(parentIdx, currentScore);
}
}
}
private final static class MinCollector extends ParentScoreCollector {
private MinCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
float currentScore = scorer.score();
if (currentScore < scores.get(parentIdx)) {
scores.set(parentIdx, currentScore);
}
}
}
private final static class MaxCountCollector extends ParentScoreCountCollector {
private MaxCountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
float currentScore = scorer.score();
if (currentScore > scores.get(parentIdx)) {
scores.set(parentIdx, currentScore);
}
occurrences.increment(parentIdx, 1);
}
}
private final static class MinCountCollector extends ParentScoreCountCollector {
private MinCountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
float currentScore = scorer.score();
if (currentScore < scores.get(parentIdx)) {
scores.set(parentIdx, currentScore);
}
occurrences.increment(parentIdx, 1);
}
}
private final static class SumCountAndAvgCollector extends ParentScoreCountCollector {
SumCountAndAvgCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
super(globalIfd, searchContext, parentType);
}
@Override
protected void existingParent(long parentIdx) throws IOException {
scores.increment(parentIdx, scorer.score());
occurrences.increment(parentIdx, 1);
}
}
private static class ParentScorer extends Scorer {
final ParentWeight parentWeight;
final LongHash parentIds;
final FloatArray scores;
final SortedDocValues globalOrdinals;
final DocIdSetIterator parentsIterator;
int currentDocId = -1;
float currentScore;
ParentScorer(ParentWeight parentWeight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals) {
super(parentWeight);
this.parentWeight = parentWeight;
this.globalOrdinals = globalOrdinals;
this.parentsIterator = parentsIterator;
this.parentIds = collector.parentIdxs;
this.scores = ((ParentScoreCollector) collector).scores;
}
@Override
public float score() throws IOException {
return currentScore;
}
protected boolean acceptAndScore(long parentIdx) {
currentScore = scores.get(parentIdx);
return true;
}
@Override
public int freq() throws IOException {
// We don't have the original child query hit info here...
// The freq of the children could be collected and returned here, but that would make this Scorer more expensive.
return 1;
}
@Override
public int docID() {
return currentDocId;
}
@Override
public int nextDoc() throws IOException {
if (parentWeight.remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
final int globalOrdinal = globalOrdinals.getOrd(currentDocId);
if (globalOrdinal < 0) {
continue;
}
final long parentIdx = parentIds.find(globalOrdinal);
if (parentIdx != -1) {
parentWeight.remaining--;
if (acceptAndScore(parentIdx)) {
return currentDocId;
}
}
}
}
@Override
public int advance(int target) throws IOException {
if (parentWeight.remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
currentDocId = parentsIterator.advance(target);
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
if (globalOrdinal < 0) {
return nextDoc();
}
final long parentIdx = parentIds.find(globalOrdinal);
if (parentIdx != -1) {
parentWeight.remaining--;
if (acceptAndScore(parentIdx)) {
return currentDocId;
}
}
return nextDoc();
}
@Override
public long cost() {
return parentsIterator.cost();
}
}
private static class ParentCountScorer extends ParentScorer {
protected final IntArray occurrences;
protected final int minChildren;
protected final int maxChildren;
ParentCountScorer(ParentWeight parentWeight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals, int minChildren, int maxChildren) {
super(parentWeight, parentsIterator, (ParentScoreCollector) collector, globalOrdinals);
this.minChildren = minChildren;
this.maxChildren = maxChildren == 0 ? Integer.MAX_VALUE : maxChildren;
this.occurrences = ((ParentScoreCountCollector) collector).occurrences;
}
@Override
protected boolean acceptAndScore(long parentIdx) {
int count = occurrences.get(parentIdx);
if (count < minChildren || count > maxChildren) {
return false;
}
return super.acceptAndScore(parentIdx);
}
}
private static final class AvgParentScorer extends ParentCountScorer {
AvgParentScorer(ParentWeight weight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals) {
super(weight, parentsIterator, collector, globalOrdinals, 0, 0);
}
@Override
protected boolean acceptAndScore(long parentIdx) {
currentScore = scores.get(parentIdx);
currentScore /= occurrences.get(parentIdx);
return true;
}
}
private static final class AvgParentCountScorer extends ParentCountScorer {
AvgParentCountScorer(ParentWeight weight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals, int minChildren, int maxChildren) {
super(weight, parentsIterator, collector, globalOrdinals, minChildren, maxChildren);
}
@Override
protected boolean acceptAndScore(long parentIdx) {
int count = occurrences.get(parentIdx);
if (count < minChildren || count > maxChildren) {
return false;
}
currentScore = scores.get(parentIdx);
currentScore /= occurrences.get(parentIdx);
return true;
}
}
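// Worked example (editorial, not original code): with scoreType = AVG and a parent
// whose three children score 1.0, 2.0 and 3.0, SumCountAndAvgCollector accumulates
// scores = 6.0 and occurrences = 3, so acceptAndScore() above yields 6.0 / 3 = 2.0.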
private final static class CountParentOrdIterator extends XFilteredDocIdSetIterator {
private final LongHash parentIds;
protected final IntArray occurrences;
private final int minChildren;
private final int maxChildren;
private final SortedDocValues ordinals;
private final ParentWeight parentWeight;
private CountParentOrdIterator(ParentWeight parentWeight, DocIdSetIterator innerIterator, ParentCollector collector, SortedDocValues ordinals, int minChildren, int maxChildren) {
super(innerIterator);
this.parentIds = ((CountCollector) collector).parentIdxs;
this.occurrences = ((CountCollector) collector).occurrences;
this.ordinals = ordinals;
this.parentWeight = parentWeight;
this.minChildren = minChildren;
this.maxChildren = maxChildren == 0 ? Integer.MAX_VALUE : maxChildren;
}
@Override
protected boolean match(int doc) {
if (parentWeight.remaining == 0) {
throw new CollectionTerminatedException();
}
final long parentOrd = ordinals.getOrd(doc);
if (parentOrd >= 0) {
final long parentIdx = parentIds.find(parentOrd);
if (parentIdx != -1) {
parentWeight.remaining--;
int count = occurrences.get(parentIdx);
if (count >= minChildren && count <= maxChildren) {
return true;
}
}
}
return false;
}
}
}
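Illustrative sketch (editorial, not part of this patch): the newParent()/existingParent() dispatch in ParentCollector.collect() hinges on LongHash.add() returning a fresh slot for an unseen key and (-1 - existingSlot) for a key already present. A minimal demonstration of that contract, assuming this codebase's LongHash API and the BigArrays.NON_RECYCLING_INSTANCE allocator:
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongHash;

public class LongHashContractSketch {
    public static void main(String[] args) {
        // Sketch only; NON_RECYCLING_INSTANCE is assumed as a test-friendly allocator.
        LongHash hash = new LongHash(16, BigArrays.NON_RECYCLING_INSTANCE);
        try {
            long first = hash.add(42L);        // >= 0: ordinal 42 seen for the first time
            long second = hash.add(42L);       // < 0: already present, encoded as -1 - slot
            assert first >= 0;
            assert second == -1 - first;
            assert hash.find(42L) == first;    // find() returns the slot, or -1 when absent
            assert hash.size() == 1;           // foundParents() above is just size()
        } finally {
            hash.close();                      // LongHash is Releasable (off-heap pages)
        }
    }
}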

View File

@ -1,77 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
/**
* A scorer that wraps a {@link DocIdSetIterator} and emits a constant score.
*/
// Borrowed from ConstantScoreQuery
public class ConstantScorer extends Scorer {
public static ConstantScorer create(DocIdSetIterator iterator, Weight weight, float constantScore) throws IOException {
return new ConstantScorer(iterator, weight, constantScore);
}
private final DocIdSetIterator docIdSetIterator;
private final float constantScore;
private ConstantScorer(DocIdSetIterator docIdSetIterator, Weight w, float constantScore) {
super(w);
this.constantScore = constantScore;
this.docIdSetIterator = docIdSetIterator;
}
@Override
public int nextDoc() throws IOException {
return docIdSetIterator.nextDoc();
}
@Override
public int docID() {
return docIdSetIterator.docID();
}
@Override
public float score() throws IOException {
assert docIdSetIterator.docID() != NO_MORE_DOCS;
return constantScore;
}
@Override
public int freq() throws IOException {
return 1;
}
@Override
public int advance(int target) throws IOException {
return docIdSetIterator.advance(target);
}
@Override
public long cost() {
return docIdSetIterator.cost();
}
}
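For illustration, a minimal usage sketch of this helper (editorial, not part of the patch), assuming ConstantScorer is on the classpath and using DocIdSetIterator.all() as a convenient stand-in source of doc ids:
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;

import java.io.IOException;

class ConstantScorerUsageSketch {
    // Illustrative sketch, not original code: every doc the iterator emits scores
    // exactly `score`, mirroring how the queries above wrap ParentIdsFilter output.
    static void drain(int maxDoc, float score) throws IOException {
        Scorer scorer = ConstantScorer.create(DocIdSetIterator.all(maxDoc), null, score);
        for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
            assert scorer.score() == score;
        }
    }
}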

View File

@ -1,257 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
* A query that only returns child documents that are linked to parent documents matched by the inner query.
*/
public class ParentConstantScoreQuery extends IndexCacheableQuery {
private final ParentChildIndexFieldData parentChildIndexFieldData;
private Query parentQuery;
private final String parentType;
private final Filter childrenFilter;
public ParentConstantScoreQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, Filter childrenFilter) {
this.parentChildIndexFieldData = parentChildIndexFieldData;
this.parentQuery = parentQuery;
this.parentType = parentType;
this.childrenFilter = childrenFilter;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query parentRewritten = parentQuery.rewrite(reader);
if (parentRewritten != parentQuery) {
Query rewritten = new ParentConstantScoreQuery(parentChildIndexFieldData, parentRewritten, parentType, childrenFilter);
rewritten.setBoost(getBoost());
return rewritten;
}
return super.rewrite(reader);
}
@Override
public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader());
final long maxOrd;
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
if (globalIfd == null || leaves.isEmpty()) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
} else {
AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
maxOrd = globalValues.getValueCount();
}
if (maxOrd == 0) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
searcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
return new ChildrenWeight(this, childrenFilter, collector, globalIfd);
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + parentQuery.hashCode();
result = 31 * result + parentType.hashCode();
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (super.equals(obj) == false) {
return false;
}
ParentConstantScoreQuery that = (ParentConstantScoreQuery) obj;
if (!parentQuery.equals(that.parentQuery)) {
return false;
}
if (!parentType.equals(that.parentType)) {
return false;
}
return true;
}
@Override
public String toString(String field) {
return "parent_filter[" + parentType + "](" + parentQuery + ')';
}
private final class ChildrenWeight extends Weight {
private final IndexParentChildFieldData globalIfd;
private final Filter childrenFilter;
private final LongBitSet parentOrds;
private float queryNorm;
private float queryWeight;
private ChildrenWeight(Query query, Filter childrenFilter, ParentOrdsCollector collector, IndexParentChildFieldData globalIfd) {
super(query);
this.globalIfd = globalIfd;
this.childrenFilter = childrenFilter;
this.parentOrds = collector.parentOrds;
}
@Override
public void extractTerms(Set<Term> terms) {
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return Explanation.match(getBoost(), "not implemented yet...");
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, null);
if (Lucene.isEmpty(childrenDocIdSet)) {
return null;
}
SortedDocValues globalValues = globalIfd.load(context).getOrdinalsValues(parentType);
if (globalValues != null) {
// we forcefully apply live docs here so that deleted children don't give matching parents
childrenDocIdSet = BitsFilteredDocIdSet.wrap(childrenDocIdSet, context.reader().getLiveDocs());
DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
if (innerIterator != null) {
ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
innerIterator, parentOrds, globalValues
);
return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
}
}
return null;
}
}
private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
private final LongBitSet parentOrds;
private final SortedDocValues globalOrdinals;
ChildrenDocIdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, SortedDocValues globalOrdinals) {
super(innerIterator);
this.parentOrds = parentOrds;
this.globalOrdinals = globalOrdinals;
}
@Override
protected boolean match(int docId) {
int globalOrd = globalOrdinals.getOrd(docId);
if (globalOrd >= 0) {
return parentOrds.get(globalOrd);
} else {
return false;
}
}
}
private final static class ParentOrdsCollector extends NoopCollector {
private final LongBitSet parentOrds;
private final IndexParentChildFieldData globalIfd;
private final String parentType;
private SortedDocValues globalOrdinals;
ParentOrdsCollector(IndexParentChildFieldData globalIfd, long maxOrd, String parentType) {
this.parentOrds = new LongBitSet(maxOrd);
this.globalIfd = globalIfd;
this.parentType = parentType;
}
@Override
public void collect(int doc) throws IOException {
// It can happen that for a particular segment no documents exist for a specific type. This check prevents an NPE.
if (globalOrdinals != null) {
long globalOrd = globalOrdinals.getOrd(doc);
if (globalOrd >= 0) {
parentOrds.set(globalOrd);
}
}
}
@Override
public void doSetNextReader(LeafReaderContext readerContext) throws IOException {
globalOrdinals = globalIfd.load(readerContext).getOrdinalsValues(parentType);
}
public long parentCount() {
return parentOrds.cardinality();
}
}
}
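A miniature sketch (editorial, not from the patch) of the two-phase join this query performs: phase one marks the global ordinals of matching parents in a LongBitSet, phase two keeps a child doc iff its parent's ordinal is marked. Array inputs here are a hypothetical simplification of the per-segment doc values:
import org.apache.lucene.util.LongBitSet;

class ParentOrdJoinSketch {
    // childParentOrds[i] holds the global parent ordinal of child i, or -1 for "no parent".
    static boolean[] join(long[] matchedParentOrds, long[] childParentOrds, long maxOrd) {
        LongBitSet parentOrds = new LongBitSet(maxOrd);      // phase 1: ParentOrdsCollector
        for (long ord : matchedParentOrds) {
            parentOrds.set(ord);
        }
        boolean[] childMatches = new boolean[childParentOrds.length];
        for (int i = 0; i < childParentOrds.length; i++) {   // phase 2: ChildrenDocIdIterator
            long ord = childParentOrds[i];
            childMatches[i] = ord >= 0 && parentOrds.get(ord);
        }
        return childMatches;
    }
}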

View File

@ -1,187 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* Advantages of using this filter over Lucene's TermsFilter in the parent/child context:
* 1) We don't need to copy all values over to a list from the id cache and then
* copy all the id values over to one contiguous byte array. This should save a lot of object creation and GC.
* 2) We filter docs by one field only.
*/
final class ParentIdsFilter extends Filter {
static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext,
String parentType, SortedDocValues globalValues,
LongBitSet parentOrds, long numFoundParents) {
BytesRefHash parentIds = null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
parentIds.add(globalValues.lookupOrd((int) parentOrd));
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext,
String parentType, SortedDocValues globalValues,
LongHash parentIdxs, long numFoundParents) {
BytesRefHash parentIds = null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (int id = 0; id < parentIdxs.size(); id++) {
parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id)));
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
private final BytesRef parentTypeBr;
private final BitSetProducer nonNestedDocsFilter;
private final BytesRefHash parentIds;
private ParentIdsFilter(String parentType, BitSetProducer nonNestedDocsFilter, BytesRefHash parentIds) {
this.nonNestedDocsFilter = nonNestedDocsFilter;
this.parentTypeBr = new BytesRef(parentType);
this.parentIds = parentIds;
}
@Override
public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
Terms terms = context.reader().terms(UidFieldMapper.NAME);
if (terms == null) {
return null;
}
TermsEnum termsEnum = terms.iterator();
BytesRefBuilder uidSpare = new BytesRefBuilder();
BytesRef idSpare = new BytesRef();
if (acceptDocs == null) {
acceptDocs = context.reader().getLiveDocs();
}
BitSet nonNestedDocs = null;
if (nonNestedDocsFilter != null) {
nonNestedDocs = nonNestedDocsFilter.getBitSet(context);
}
PostingsEnum docsEnum = null;
BitSet result = null;
int size = (int) parentIds.size();
for (int i = 0; i < size; i++) {
parentIds.get(i, idSpare);
BytesRef uid = Uid.createUidAsBytes(parentTypeBr, idSpare, uidSpare);
if (termsEnum.seekExact(uid)) {
docsEnum = termsEnum.postings(docsEnum, PostingsEnum.NONE);
int docId;
for (docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
if (acceptDocs == null || acceptDocs.get(docId)) {
break;
}
}
if (docId == DocIdSetIterator.NO_MORE_DOCS) {
continue;
}
if (result == null) {
// very rough heuristic that tries to get an idea of the number of documents
// in the set based on the number of parent ids that we didn't find in this segment
final int expectedCardinality = size / (i + 1);
// similar heuristic to BitDocIdSet.Builder
if (expectedCardinality >= (context.reader().maxDoc() >>> 10)) {
result = new FixedBitSet(context.reader().maxDoc());
} else {
result = new SparseFixedBitSet(context.reader().maxDoc());
}
}
if (nonNestedDocs != null) {
docId = nonNestedDocs.nextSetBit(docId);
}
result.set(docId);
assert docsEnum.advance(docId + 1) == DocIdSetIterator.NO_MORE_DOCS : "DocId " + docId + " should have been the last one but docId " + docsEnum.docID() + " exists.";
}
}
return result == null ? null : new BitDocIdSet(result);
}
@Override
public String toString(String field) {
return "parentsFilter(type=" + parentTypeBr.utf8ToString() + ")";
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj) == false) {
return false;
}
ParentIdsFilter other = (ParentIdsFilter) obj;
return parentTypeBr.equals(other.parentTypeBr)
&& parentIds.equals(other.parentIds)
&& nonNestedDocsFilter.equals(other.nonNestedDocsFilter);
}
@Override
public int hashCode() {
int h = super.hashCode();
h = 31 * h + parentTypeBr.hashCode();
h = 31 * h + parentIds.hashCode();
h = 31 * h + nonNestedDocsFilter.hashCode();
return h;
}
}
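For clarity, a hedged sketch (editorial, not part of the patch) of the per-id lookup getDocIdSet() performs: seek the _uid term for "parentType#parentId" and take its first posting, since _uid is unique per document:
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;

import java.io.IOException;

class UidSeekSketch {
    // Illustrative only: returns the doc id carrying the given _uid term, or NO_MORE_DOCS.
    static int firstDocForUid(LeafReader reader, BytesRef uid) throws IOException {
        Terms terms = reader.terms("_uid");                  // UidFieldMapper.NAME
        if (terms == null) {
            return DocIdSetIterator.NO_MORE_DOCS;
        }
        TermsEnum termsEnum = terms.iterator();
        if (termsEnum.seekExact(uid) == false) {
            return DocIdSetIterator.NO_MORE_DOCS;            // id absent from this segment
        }
        PostingsEnum postings = termsEnum.postings(null, PostingsEnum.NONE);
        return postings.nextDoc();                           // at most one doc per uid
    }
}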

View File

@ -1,350 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.Set;
/**
* A query implementation that executes the wrapped parent query and
* connects the matching parent docs to the related child documents
* using the {@link ParentChildIndexFieldData}.
*/
public class ParentQuery extends IndexCacheableQuery {
private final ParentChildIndexFieldData parentChildIndexFieldData;
private Query parentQuery;
private final String parentType;
private final Filter childrenFilter;
public ParentQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, Filter childrenFilter) {
this.parentChildIndexFieldData = parentChildIndexFieldData;
this.parentQuery = parentQuery;
this.parentType = parentType;
this.childrenFilter = childrenFilter;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (super.equals(obj) == false) {
return false;
}
ParentQuery that = (ParentQuery) obj;
if (!parentQuery.equals(that.parentQuery)) {
return false;
}
if (!parentType.equals(that.parentType)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + parentQuery.hashCode();
result = 31 * result + parentType.hashCode();
result = 31 * result + Float.floatToIntBits(getBoost());
return result;
}
@Override
public String toString(String field) {
return "ParentQuery[" + parentType + "](" + parentQuery.toString(field) + ')' + ToStringUtils.boost(getBoost());
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query parentRewritten = parentQuery.rewrite(reader);
if (parentRewritten != parentQuery) {
Query rewritten = new ParentQuery(parentChildIndexFieldData, parentRewritten, parentType, childrenFilter);
rewritten.setBoost(getBoost());
return rewritten;
}
return super.rewrite(reader);
}
@Override
public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
SearchContext sc = SearchContext.current();
ChildWeight childWeight;
boolean releaseCollectorResource = true;
ParentOrdAndScoreCollector collector = null;
IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader());
if (globalIfd == null) {
// No docs of the specified type exist on this shard
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
try {
collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
searcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
}
childWeight = new ChildWeight(this, parentQuery.createWeight(searcher, needsScores), childrenFilter, collector, globalIfd);
releaseCollectorResource = false;
} finally {
if (releaseCollectorResource) {
// either if we run into an exception or if we return early
Releasables.close(collector);
}
}
sc.addReleasable(collector, Lifetime.COLLECTION);
return childWeight;
}
private static class ParentOrdAndScoreCollector implements Collector, Releasable {
private final LongHash parentIdxs;
private FloatArray scores;
private final IndexParentChildFieldData globalIfd;
private final BigArrays bigArrays;
private final String parentType;
ParentOrdAndScoreCollector(SearchContext searchContext, IndexParentChildFieldData globalIfd, String parentType) {
this.bigArrays = searchContext.bigArrays();
this.parentIdxs = new LongHash(512, bigArrays);
this.scores = bigArrays.newFloatArray(512, false);
this.globalIfd = globalIfd;
this.parentType = parentType;
}
@Override
public boolean needsScores() {
return true;
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
final SortedDocValues values = globalIfd.load(context).getOrdinalsValues(parentType);
if (values == null) {
return NoopCollector.NOOP_COLLECTOR;
}
return new LeafCollector() {
Scorer scorer;
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
@Override
public void collect(int doc) throws IOException {
long globalOrdinal = values.getOrd(doc);
if (globalOrdinal != SortedSetDocValues.NO_MORE_ORDS) {
long parentIdx = parentIdxs.add(globalOrdinal);
if (parentIdx >= 0) {
scores = bigArrays.grow(scores, parentIdx + 1);
scores.set(parentIdx, scorer.score());
} else {
assert false : "parent id should only match once, since there can only be one parent doc";
}
}
}
};
}
@Override
public void close() {
Releasables.close(parentIdxs, scores);
}
public long parentCount() {
return parentIdxs.size();
}
}
private class ChildWeight extends Weight {
private final Weight parentWeight;
private final Filter childrenFilter;
private final LongHash parentIdxs;
private final FloatArray scores;
private final IndexParentChildFieldData globalIfd;
private ChildWeight(Query query, Weight parentWeight, Filter childrenFilter, ParentOrdAndScoreCollector collector, IndexParentChildFieldData globalIfd) {
super(query);
this.parentWeight = parentWeight;
this.childrenFilter = childrenFilter;
this.parentIdxs = collector.parentIdxs;
this.scores = collector.scores;
this.globalIfd = globalIfd;
}
@Override
public void extractTerms(Set<Term> terms) {
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return Explanation.match(getBoost(), "not implemented yet...");
}
@Override
public float getValueForNormalization() throws IOException {
float sum = parentWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm, float topLevelBoost) {
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
DocIdSet childrenDocSet = childrenFilter.getDocIdSet(context, null);
// we forcefully apply live docs here so that deleted children don't give matching parents
childrenDocSet = BitsFilteredDocIdSet.wrap(childrenDocSet, context.reader().getLiveDocs());
if (Lucene.isEmpty(childrenDocSet)) {
return null;
}
final DocIdSetIterator childIterator = childrenDocSet.iterator();
if (childIterator == null) {
return null;
}
SortedDocValues bytesValues = globalIfd.load(context).getOrdinalsValues(parentType);
if (bytesValues == null) {
return null;
}
return new ChildScorer(this, parentIdxs, scores, childIterator, bytesValues);
}
}
private static class ChildScorer extends Scorer {
private final LongHash parentIdxs;
private final FloatArray scores;
private final DocIdSetIterator childrenIterator;
private final SortedDocValues ordinals;
private int currentChildDoc = -1;
private float currentScore;
ChildScorer(Weight weight, LongHash parentIdxs, FloatArray scores, DocIdSetIterator childrenIterator, SortedDocValues ordinals) {
super(weight);
this.parentIdxs = parentIdxs;
this.scores = scores;
this.childrenIterator = childrenIterator;
this.ordinals = ordinals;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public int freq() throws IOException {
// We don't have the original child query hit info here...
// The freq of the children could be collected and returned here, but that would make this Scorer more expensive.
return 1;
}
@Override
public int docID() {
return currentChildDoc;
}
@Override
public int nextDoc() throws IOException {
while (true) {
currentChildDoc = childrenIterator.nextDoc();
if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
return currentChildDoc;
}
int globalOrdinal = (int) ordinals.getOrd(currentChildDoc);
if (globalOrdinal < 0) {
continue;
}
final long parentIdx = parentIdxs.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
return currentChildDoc;
}
}
}
@Override
public int advance(int target) throws IOException {
currentChildDoc = childrenIterator.advance(target);
if (currentChildDoc == DocIdSetIterator.NO_MORE_DOCS) {
return currentChildDoc;
}
int globalOrdinal = (int) ordinals.getOrd(currentChildDoc);
if (globalOrdinal < 0) {
return nextDoc();
}
final long parentIdx = parentIdxs.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
return currentChildDoc;
} else {
return nextDoc();
}
}
@Override
public long cost() {
return childrenIterator.cost();
}
}
}
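A miniature sketch (editorial; an assumed simplification using a plain java.util.Map in place of the LongHash/FloatArray pair) of what ChildScorer does: each child doc looks up its parent's global ordinal and inherits the matching parent's score:
import java.util.Map;

class ChildScoreJoinSketch {
    // Illustrative only. A missing entry means the child's parent did not match the parent query.
    static float[] inheritScores(long[] childParentOrds, Map<Long, Float> parentScoreByOrd) {
        float[] childScores = new float[childParentOrds.length];
        for (int i = 0; i < childParentOrds.length; i++) {
            Float parentScore = parentScoreByOrd.get(childParentOrds[i]);
            childScores[i] = parentScore == null ? 0f : parentScore; // 0f marks "skipped"
        }
        return childScores;
    }
}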

View File

@ -20,16 +20,12 @@ package org.elasticsearch.search.aggregations.bucket.children;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.LongObjectPagedHashMap;
-import org.elasticsearch.index.search.child.ConstantScorer;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -155,7 +151,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator {
final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx);
// Set the scorer, since we now replay only the child docIds
-sub.setScorer(ConstantScorer.create(childDocsIter, null, 1f));
+sub.setScorer(new ConstantScoreScorer(null, 1f, childDocsIter));
final Bits liveDocs = ctx.reader().getLiveDocs();
for (int docId = childDocsIter.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = childDocsIter.nextDoc()) {
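The replacement line illustrated in isolation (a sketch, not part of the patch): Lucene's stock ConstantScoreScorer now plays the role of the removed ConstantScorer helper:
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;

class ConstantScoreScorerSketch {
    // Same contract as ConstantScorer.create(iter, null, 1f): every doc scores 1f.
    static Scorer wrap(DocIdSetIterator childDocsIter) {
        return new ConstantScoreScorer(null, 1f, childDocsIter);
    }
}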

View File

@ -979,30 +979,6 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).id(), equalTo("2"));
}
-@Test
-public void testAliasesFilterWithHasChildQueryPre2Dot0() throws Exception {
-assertAcked(prepareCreate("my-index")
-.setSettings(Settings.builder()
-.put(indexSettings())
-.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_6_0)
-)
-.addMapping("parent")
-.addMapping("child", "_parent", "type=parent")
-);
-client().prepareIndex("my-index", "parent", "1").setSource("{}").get();
-client().prepareIndex("my-index", "child", "2").setSource("{}").setParent("1").get();
-refresh();
-assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", hasChildQuery("child", matchAllQuery())));
-assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery())));
-SearchResponse response = client().prepareSearch("filter1").get();
-assertHitCount(response, 1);
-assertThat(response.getHits().getAt(0).id(), equalTo("1"));
-response = client().prepareSearch("filter2").get();
-assertHitCount(response, 1);
-assertThat(response.getHits().getAt(0).id(), equalTo("2"));
-}
@Test
public void testAliasesWithBlocks() {
createIndex("test");

View File

@ -1,120 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;
import java.util.Locale;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.core.IsNull.notNullValue;
/**
*/
public class ParentChildFilteredTermsEnumTests extends ESTestCase {
@Test
public void testSimple_twoFieldEachUniqueValue() throws Exception {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
for (int i = 1; i <= 10000; i++) {
Document document = new Document();
String fieldName = i % 2 == 0 ? "field1" : "field2";
document.add(new StringField(fieldName, format(i), Field.Store.NO));
indexWriter.addDocument(document);
}
IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
TermsEnum[] compoundTermsEnums = new TermsEnum[]{
new ParentChildIntersectTermsEnum(SlowCompositeReaderWrapper.wrap(indexReader), "field1", "field2")
};
for (TermsEnum termsEnum : compoundTermsEnums) {
int expected = 0;
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
++expected;
assertThat(term.utf8ToString(), equalTo(format(expected)));
PostingsEnum docsEnum = termsEnum.postings(null);
assertThat(docsEnum, notNullValue());
int docId = docsEnum.nextDoc();
assertThat(docId, not(equalTo(-1)));
assertThat(docId, not(equalTo(DocIdSetIterator.NO_MORE_DOCS)));
assertThat(docsEnum.nextDoc(), equalTo(DocIdSetIterator.NO_MORE_DOCS));
}
}
indexWriter.close();
indexReader.close();
directory.close();
}
@Test
public void testDocument_twoFieldsEachSharingValues() throws Exception {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
for (int i = 1; i <= 1000; i++) {
Document document = new Document();
document.add(new StringField("field1", format(i), Field.Store.NO));
indexWriter.addDocument(document);
for (int j = 0; j < 10; j++) {
document = new Document();
document.add(new StringField("field2", format(i), Field.Store.NO));
indexWriter.addDocument(document);
}
}
IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
TermsEnum[] compoundTermsEnums = new TermsEnum[]{
new ParentChildIntersectTermsEnum(SlowCompositeReaderWrapper.wrap(indexReader), "field1", "field2")
};
for (TermsEnum termsEnum : compoundTermsEnums) {
int expected = 0;
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
++expected;
assertThat(term.utf8ToString(), equalTo(format(expected)));
PostingsEnum docsEnum = termsEnum.postings(null);
assertThat(docsEnum, notNullValue());
int numDocs = 0;
for (int docId = docsEnum.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
numDocs++;
}
assertThat(numDocs, equalTo(11));
}
}
indexWriter.close();
indexReader.close();
directory.close();
}
static String format(int i) {
return String.format(Locale.ROOT, "%06d", i);
}
}

View File

@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.elasticsearch.index.search.child;
+package org.elasticsearch.index.query;
+import org.elasticsearch.index.query.ScoreType;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;

View File

@ -1,149 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.hamcrest.Description;
import org.hamcrest.StringDescription;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
public abstract class AbstractChildTestCase extends ESSingleNodeTestCase {
/**
* The name of the field within the child type that stores a score to use in test queries.
* <p />
* Its type is {@code double}.
*/
protected static String CHILD_SCORE_NAME = "childScore";
static SearchContext createSearchContext(String indexName, String parentType, String childType) throws IOException {
Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_6_0)
.build();
IndexService indexService = createIndex(indexName, settings);
MapperService mapperService = indexService.mapperService();
// Parent/child parsers require that the parent and child types be present in the mapping
// Sometimes we want a nested object field in the parent type that triggers nonNestedDocsFilter to be used
mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true, false);
mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true, false);
return createSearchContext(indexService);
}
static void assertBitSet(BitSet actual, BitSet expected, IndexSearcher searcher) throws IOException {
assertBitSet(new BitDocIdSet(actual), new BitDocIdSet(expected), searcher);
}
static void assertBitSet(BitDocIdSet actual, BitDocIdSet expected, IndexSearcher searcher) throws IOException {
if (!equals(expected, actual)) {
Description description = new StringDescription();
description.appendText(reason(actual, expected, searcher));
description.appendText("\nExpected: ");
description.appendValue(expected);
description.appendText("\n got: ");
description.appendValue(actual);
description.appendText("\n");
throw new java.lang.AssertionError(description.toString());
}
}
static boolean equals(BitDocIdSet expected, BitDocIdSet actual) {
if (actual == null && expected == null) {
return true;
} else if (actual == null || expected == null) {
return false;
}
BitSet actualBits = actual.bits();
BitSet expectedBits = expected.bits();
if (actualBits.length() != expectedBits.length()) {
return false;
}
for (int i = 0; i < expectedBits.length(); i++) {
if (expectedBits.get(i) != actualBits.get(i)) {
return false;
}
}
return true;
}
static String reason(BitDocIdSet actual, BitDocIdSet expected, IndexSearcher indexSearcher) throws IOException {
StringBuilder builder = new StringBuilder();
builder.append("expected cardinality:").append(expected.bits().cardinality()).append('\n');
DocIdSetIterator iterator = expected.iterator();
for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
builder.append("Expected doc[").append(doc).append("] with id value ").append(indexSearcher.doc(doc).get(UidFieldMapper.NAME)).append('\n');
}
builder.append("actual cardinality: ").append(actual.bits().cardinality()).append('\n');
iterator = actual.iterator();
for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
builder.append("Actual doc[").append(doc).append("] with id value ").append(indexSearcher.doc(doc).get(UidFieldMapper.NAME)).append('\n');
}
return builder.toString();
}
static void assertTopDocs(TopDocs actual, TopDocs expected) {
assertThat("actual.totalHits != expected.totalHits", actual.totalHits, equalTo(expected.totalHits));
assertThat("actual.getMaxScore() != expected.getMaxScore()", actual.getMaxScore(), equalTo(expected.getMaxScore()));
assertThat("actual.scoreDocs.length != expected.scoreDocs.length", actual.scoreDocs.length, equalTo(actual.scoreDocs.length));
for (int i = 0; i < actual.scoreDocs.length; i++) {
ScoreDoc actualHit = actual.scoreDocs[i];
ScoreDoc expectedHit = expected.scoreDocs[i];
assertThat("actualHit.doc != expectedHit.doc", actualHit.doc, equalTo(expectedHit.doc));
assertThat("actualHit.score != expectedHit.score", actualHit.score, equalTo(expectedHit.score));
}
}
static BitSetProducer wrapWithBitSetFilter(Filter filter) {
return SearchContext.current().bitsetFilterCache().getBitSetProducer(filter);
}
static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
QueryParseContext context = new QueryParseContext(new Index("test"), SearchContext.current().queryParserService());
XContentParser parser = XContentHelper.createParser(queryBuilder.buildAsBytes());
context.reset(parser);
return context.parseInnerQuery();
}
}

View File

@ -1,50 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.search.NoopCollector;
import java.io.IOException;
class BitSetCollector extends NoopCollector {
final FixedBitSet result;
int docBase;
BitSetCollector(int topLevelMaxDoc) {
this.result = new FixedBitSet(topLevelMaxDoc);
}
@Override
public void collect(int doc) throws IOException {
result.set(docBase + doc);
}
@Override
protected void doSetNextReader(LeafReaderContext context) throws IOException {
docBase = context.docBase;
}
FixedBitSet getResult() {
return result;
}
}
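A hypothetical usage sketch of this helper (editorial, not in the patch), assuming the caller sits in the same package as the package-private BitSetCollector: gather every hit of a query into one top-level FixedBitSet:
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.util.FixedBitSet;

import java.io.IOException;

class BitSetCollectorUsageSketch {
    // Illustrative only: collect(doc) above offsets by docBase, so the bits are index-wide.
    static FixedBitSet collectAll(IndexReader reader) throws IOException {
        IndexSearcher searcher = new IndexSearcher(reader);
        BitSetCollector collector = new BitSetCollector(reader.maxDoc());
        searcher.search(new MatchAllDocsQuery(), collector);
        return collector.getResult();
    }
}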

View File

@ -1,295 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeSet;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.hamcrest.Matchers.equalTo;
public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
@BeforeClass
public static void before() throws IOException {
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@AfterClass
public static void after() throws IOException {
SearchContext current = SearchContext.current();
SearchContext.removeCurrent();
Releasables.close(current);
}
@Test
public void testBasicQuerySanities() {
Query childQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
}
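// Indexes five parents with three children each ("value1".."value3"); a query on a single
// child value matches exactly one child per parent, so all five parents are expected
// regardless of the randomly chosen short circuit cutoff.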
@Test
public void testSimple() throws Exception {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
for (int parent = 1; parent <= 5; parent++) {
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", Integer.toString(parent)), Field.Store.NO));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
indexWriter.addDocument(document);
for (int child = 1; child <= 3; child++) {
document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", Integer.toString(parent * 3 + child)), Field.Store.NO));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", Integer.toString(parent)), Field.Store.NO));
document.add(new StringField("field1", "value" + child, Field.Store.NO));
indexWriter.addDocument(document);
}
}
IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
IndexSearcher searcher = new IndexSearcher(indexReader);
((TestSearchContext) SearchContext.current()).setSearcher(
new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
);
TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
int shortCircuitParentDocSet = random().nextInt(5);
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
ChildrenConstantScoreQuery query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet, null);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
searcher.search(query, collector);
FixedBitSet actualResult = collector.getResult();
assertThat(actualResult.cardinality(), equalTo(5));
indexWriter.close();
indexReader.close();
directory.close();
}
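// Randomized round trip: build a random parent/child index (including deleted and
// filtered out docs), run the has_child query, and compare its bit set against an
// expectation computed by walking the _uid terms of a SlowCompositeReaderWrapper.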
@Test
public void testRandom() throws Exception {
Directory directory = newDirectory();
final Random r = random();
final IndexWriterConfig iwc = LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
int numUniqueChildValues = scaledRandomIntBetween(100, 2000);
String[] childValues = new String[numUniqueChildValues];
for (int i = 0; i < numUniqueChildValues; i++) {
childValues[i] = Integer.toString(i);
}
IntHashSet filteredOrDeletedDocs = new IntHashSet();
int childDocId = 0;
int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
ObjectObjectHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectHashMap<>();
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
boolean markParentAsDeleted = rarely();
boolean filterMe = rarely();
String parent = Integer.toString(parentDocId);
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
if (markParentAsDeleted) {
filteredOrDeletedDocs.add(parentDocId);
document.add(new StringField("delete", "me", Field.Store.NO));
}
if (filterMe) {
filteredOrDeletedDocs.add(parentDocId);
document.add(new StringField("filter", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
final int numChildDocs = scaledRandomIntBetween(0, 100);
for (int i = 0; i < numChildDocs; i++) {
boolean markChildAsDeleted = rarely();
String childValue = childValues[random().nextInt(childValues.length)];
document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", Integer.toString(childDocId++)), Field.Store.NO));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
document.add(new StringField("field1", childValue, Field.Store.NO));
if (markChildAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
if (!markChildAsDeleted) {
NavigableSet<String> parentIds;
if (childValueToParentIds.containsKey(childValue)) {
parentIds = childValueToParentIds.get(childValue);
} else {
childValueToParentIds.put(childValue, parentIds = new TreeSet<>());
}
if (!markParentAsDeleted && !filterMe) {
parentIds.add(parent);
}
}
}
}
// Delete docs that are marked to be deleted.
indexWriter.deleteDocuments(new Term("delete", "me"));
indexWriter.commit();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher searcher = new IndexSearcher(indexReader);
Engine.Searcher engineSearcher = new Engine.Searcher(
ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
// Simulate a parent update
if (random().nextBoolean()) {
final int numberOfUpdatableParents = numParentDocs - filteredOrDeletedDocs.size();
int numberOfUpdates = scaledRandomIntBetween(0, numberOfUpdatableParents);
for (int j = 0; j < numberOfUpdates; j++) {
int parentId;
do {
parentId = random().nextInt(numParentDocs);
} while (filteredOrDeletedDocs.contains(parentId));
String parentUid = Uid.createUid("parent", Integer.toString(parentId));
indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, parentUid));
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, parentUid, Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
indexWriter.addDocument(document);
}
indexReader.close();
indexReader = DirectoryReader.open(indexWriter.w, true);
searcher = new IndexSearcher(indexReader);
engineSearcher = new Engine.Searcher(
ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String childValue = childValues[random().nextInt(numUniqueChildValues)];
int shortCircuitParentDocSet = random().nextInt(numParentDocs);
QueryBuilder queryBuilder;
if (random().nextBoolean()) {
queryBuilder = hasChildQuery("child", termQuery("field1", childValue))
.setShortCircuitCutoff(shortCircuitParentDocSet);
} else {
queryBuilder = constantScoreQuery(
hasChildQuery("child", termQuery("field1", childValue))
.setShortCircuitCutoff(shortCircuitParentDocSet)
);
}
// Using a filtered query invokes / tests Scorer#advance(..) and also keeps the Weight#scorer from getting live docs as acceptedDocs
queryBuilder = filteredQuery(queryBuilder, notQuery(termQuery("filter", "me")));
Query query = parseQuery(queryBuilder);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
searcher.search(query, collector);
FixedBitSet actualResult = collector.getResult();
FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
if (childValueToParentIds.containsKey(childValue)) {
LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
if (terms != null) {
NavigableSet<String> parentIds = childValueToParentIds.get(childValue);
TermsEnum termsEnum = terms.iterator();
PostingsEnum docsEnum = null;
for (String id : parentIds) {
TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(Uid.createUidAsBytes("parent", id));
if (seekStatus == TermsEnum.SeekStatus.FOUND) {
docsEnum = termsEnum.postings(docsEnum, PostingsEnum.NONE);
final Bits liveDocs = slowLeafReader.getLiveDocs();
for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
}
}
expectedResult.set(docsEnum.docID());
} else if (seekStatus == TermsEnum.SeekStatus.END) {
break;
}
}
}
}
assertBitSet(actualResult, expectedResult, searcher);
}
indexWriter.close();
indexReader.close();
directory.close();
}
}

View File

@ -1,397 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
public class ChildrenQueryTests extends AbstractChildTestCase {
@BeforeClass
public static void before() throws IOException {
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@AfterClass
public static void after() throws IOException {
SearchContext current = SearchContext.current();
SearchContext.removeCurrent();
Releasables.close(current);
}
@Test
public void testBasicQuerySanities() {
Query childQuery = new TermQuery(new Term("field", "value"));
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
int minChildren = random().nextInt(10);
int maxChildren = scaledRandomIntBetween(minChildren, 10);
Query query = new ChildrenQuery(parentChildIndexFieldData, "parent", "child", parentFilter, childQuery, scoreType, minChildren,
maxChildren, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
}
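// Same randomized strategy as ChildrenConstantScoreQueryTests#testRandom, but child
// scores are tracked per parent so the score type and the min/maxChildren cutoffs can
// be verified against a TopScoreDocCollector driven by MockScorer.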
@Test
public void testRandom() throws Exception {
Directory directory = newDirectory();
final Random r = random();
final IndexWriterConfig iwc = LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
int numUniqueChildValues = scaledRandomIntBetween(100, 2000);
String[] childValues = new String[numUniqueChildValues];
for (int i = 0; i < numUniqueChildValues; i++) {
childValues[i] = Integer.toString(i);
}
IntHashSet filteredOrDeletedDocs = new IntHashSet();
int childDocId = 0;
int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
ObjectObjectHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectHashMap<>();
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
boolean markParentAsDeleted = rarely();
boolean filterMe = rarely();
String parent = Integer.toString(parentDocId);
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
if (markParentAsDeleted) {
filteredOrDeletedDocs.add(parentDocId);
document.add(new StringField("delete", "me", Field.Store.NO));
}
if (filterMe) {
filteredOrDeletedDocs.add(parentDocId);
document.add(new StringField("filter", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
int numChildDocs = scaledRandomIntBetween(0, 100);
for (int i = 0; i < numChildDocs; i++) {
boolean markChildAsDeleted = rarely();
String childValue = childValues[random().nextInt(childValues.length)];
document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", Integer.toString(childDocId++)), Field.Store.NO));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
document.add(new StringField("field1", childValue, Field.Store.NO));
if (markChildAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
if (!markChildAsDeleted) {
NavigableMap<String, FloatArrayList> parentIdToChildScores;
if (childValueToParentIds.containsKey(childValue)) {
parentIdToChildScores = childValueToParentIds.get(childValue);
} else {
childValueToParentIds.put(childValue, parentIdToChildScores = new TreeMap<>());
}
if (!markParentAsDeleted && !filterMe) {
FloatArrayList childScores = parentIdToChildScores.get(parent);
if (childScores == null) {
parentIdToChildScores.put(parent, childScores = new FloatArrayList());
}
childScores.add(1f);
}
}
}
}
// Delete docs that are marked to be deleted.
indexWriter.deleteDocuments(new Term("delete", "me"));
indexWriter.commit();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher searcher = new IndexSearcher(indexReader);
Engine.Searcher engineSearcher = new Engine.Searcher(
ChildrenQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
// Simulate a parent update
if (random().nextBoolean()) {
final int numberOfUpdatableParents = numParentDocs - filteredOrDeletedDocs.size();
int numberOfUpdates = RandomInts.randomIntBetween(random(), 0, Math.min(numberOfUpdatableParents, TEST_NIGHTLY ? 25 : 5));
for (int j = 0; j < numberOfUpdates; j++) {
int parentId;
do {
parentId = random().nextInt(numParentDocs);
} while (filteredOrDeletedDocs.contains(parentId));
String parentUid = Uid.createUid("parent", Integer.toString(parentId));
indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, parentUid));
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, parentUid, Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
indexWriter.addDocument(document);
}
indexReader.close();
indexReader = DirectoryReader.open(indexWriter.w, true);
searcher = new IndexSearcher(indexReader);
engineSearcher = new Engine.Searcher(
ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String childValue = childValues[random().nextInt(numUniqueChildValues)];
int shortCircuitParentDocSet = random().nextInt(numParentDocs);
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
// leave min/max set to 0 half the time
int minChildren = random().nextInt(2) * scaledRandomIntBetween(0, 110);
int maxChildren = random().nextInt(2) * scaledRandomIntBetween(minChildren, 110);
QueryBuilder queryBuilder = hasChildQuery("child", constantScoreQuery(termQuery("field1", childValue)))
.scoreType(scoreType.name().toLowerCase(Locale.ENGLISH))
.minChildren(minChildren)
.maxChildren(maxChildren)
.setShortCircuitCutoff(shortCircuitParentDocSet);
// Using a filtered query invokes / tests Scorer#advance(..) and also keeps the Weight#scorer from getting live docs as acceptedDocs
queryBuilder = filteredQuery(queryBuilder, notQuery(termQuery("filter", "me")));
Query query = parseQuery(queryBuilder);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
int numHits = 1 + random().nextInt(25);
TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits);
searcher.search(query, MultiCollector.wrap(collector, actualTopDocsCollector));
FixedBitSet actualResult = collector.getResult();
FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
TopScoreDocCollector expectedTopDocsCollector = TopScoreDocCollector.create(numHits);
if (childValueToParentIds.containsKey(childValue)) {
LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
if (terms != null) {
NavigableMap<String, FloatArrayList> parentIdToChildScores = childValueToParentIds.get(childValue);
TermsEnum termsEnum = terms.iterator();
PostingsEnum docsEnum = null;
for (Map.Entry<String, FloatArrayList> entry : parentIdToChildScores.entrySet()) {
int count = entry.getValue().elementsCount;
if (count >= minChildren && (maxChildren == 0 || count <= maxChildren)) {
TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(Uid.createUidAsBytes("parent", entry.getKey()));
if (seekStatus == TermsEnum.SeekStatus.FOUND) {
docsEnum = termsEnum.postings(docsEnum, PostingsEnum.NONE);
final Bits liveDocs = slowLeafReader.getLiveDocs();
for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
}
}
expectedResult.set(docsEnum.docID());
scores[docsEnum.docID()] = new FloatArrayList(entry.getValue());
} else if (seekStatus == TermsEnum.SeekStatus.END) {
break;
}
}
}
}
MockScorer mockScorer = new MockScorer(scoreType);
final LeafCollector leafCollector = expectedTopDocsCollector.getLeafCollector(slowLeafReader.getContext());
leafCollector.setScorer(mockScorer);
for (int doc = expectedResult.nextSetBit(0); doc < slowLeafReader.maxDoc(); doc = doc + 1 >= expectedResult.length() ? DocIdSetIterator.NO_MORE_DOCS : expectedResult.nextSetBit(doc + 1)) {
mockScorer.scores = scores[doc];
leafCollector.collect(doc);
}
}
assertBitSet(actualResult, expectedResult, searcher);
assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
}
indexWriter.close();
indexReader.close();
directory.close();
}
@Test
public void testMinScoreMode() throws IOException {
assertScoreType(ScoreType.MIN);
}
@Test
public void testMaxScoreMode() throws IOException {
assertScoreType(ScoreType.MAX);
}
@Test
public void testAvgScoreMode() throws IOException {
assertScoreType(ScoreType.AVG);
}
@Test
public void testSumScoreMode() throws IOException {
assertScoreType(ScoreType.SUM);
}
/**
* Assert that the {@code scoreType} operates as expected and parents are found in the expected order.
* <p />
* This will use the test index's parent/child types to create parents with multiple children. Each child will have
* a randomly generated score stored in {@link #CHILD_SCORE_NAME}, which is used to score based on the
* {@code scoreType} by using a {@link MockScorer} to determine the expected scores.
* @param scoreType The score type to use within the query to score parents relative to their children.
* @throws IOException if any unexpected error occurs
*/
private void assertScoreType(ScoreType scoreType) throws IOException {
SearchContext context = SearchContext.current();
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())));
// calculates the expected score per parent
MockScorer scorer = new MockScorer(scoreType);
scorer.scores = new FloatArrayList(10);
// number of parents to generate
int parentDocs = scaledRandomIntBetween(2, 10);
// unique child ID
int childDocId = 0;
// Parent ID to expected score
Map<String, Float> parentScores = new TreeMap<>();
// Add a few random parents to ensure that the children's score is appropriately taken into account
for (int parentDocId = 0; parentDocId < parentDocs; ++parentDocId) {
String parent = Integer.toString(parentDocId);
// Create the parent
Document parentDocument = new Document();
parentDocument.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
parentDocument.add(new StringField(IdFieldMapper.NAME, parent, Field.Store.YES));
parentDocument.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
// add the parent to the index
writer.addDocument(parentDocument);
int numChildDocs = scaledRandomIntBetween(1, 10);
// forget any parent's previous scores
scorer.scores.clear();
// associate children with the parent
for (int i = 0; i < numChildDocs; ++i) {
int childScore = random().nextInt(128);
Document childDocument = new Document();
childDocument.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", Integer.toString(childDocId++)), Field.Store.NO));
childDocument.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
// parent association:
childDocument.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
childDocument.add(new DoubleField(CHILD_SCORE_NAME, childScore, Field.Store.NO));
// remember the score to be calculated
scorer.scores.add(childScore);
// add the associated child to the index
writer.addDocument(childDocument);
}
// this is the score that should be returned for this parent
parentScores.put(parent, scorer.score());
}
writer.commit();
IndexReader reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);
// setup to read the parent/child map
Engine.Searcher engineSearcher = new Engine.Searcher(ChildrenQueryTests.class.getSimpleName(), searcher);
((TestSearchContext)context).setSearcher(engineSearcher);
// child query that returns the score as the value of "childScore" for each child document, with the parent's score determined by the score type
QueryBuilder childQueryBuilder = functionScoreQuery(typeQuery("child")).add(new FieldValueFactorFunctionBuilder(CHILD_SCORE_NAME));
QueryBuilder queryBuilder = hasChildQuery("child", childQueryBuilder)
.scoreType(scoreType.name().toLowerCase(Locale.ENGLISH))
.setShortCircuitCutoff(parentDocs);
// Perform the search for the documents using the selected score type
Query query = parseQuery(queryBuilder);
TopDocs docs = searcher.search(query, parentDocs);
assertThat("Expected all parents", docs.totalHits, is(parentDocs));
// score should be descending (just a sanity check)
float topScore = docs.scoreDocs[0].score;
// ensure each score is returned as expected
for (int i = 0; i < parentDocs; ++i) {
ScoreDoc scoreDoc = docs.scoreDocs[i];
// get the ID from the document to get its expected score; remove it so we cannot double-count it
float score = parentScores.remove(reader.document(scoreDoc.doc).get(IdFieldMapper.NAME));
// expect exact match
assertThat("Unexpected score", scoreDoc.score, is(score));
assertThat("Not descending", score, lessThanOrEqualTo(topScore));
// it had better keep descending
topScore = score;
}
reader.close();
writer.close();
directory.close();
}
}

View File

@ -1,102 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
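/**
 * Fake scorer used by the query tests in this package: instead of scoring documents it
 * folds a fixed list of child scores into a single parent score according to the given
 * {@link ScoreType}.
 */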
class MockScorer extends Scorer {
final ScoreType scoreType;
FloatArrayList scores;
MockScorer(ScoreType scoreType) {
super(null);
this.scoreType = scoreType;
}
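// Aggregates the current scores list; e.g. [2f, 4f] yields 2f for MIN, 4f for MAX,
// 6f for SUM and 3f for AVG, while NONE always returns 1f.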
@Override
public float score() throws IOException {
if (scoreType == ScoreType.NONE) {
return 1.0f;
}
float aggregateScore = 0;
// MIN must not be seeded with 0 (the lowest possible score); seeding with the first
// score is correct for every score type, so start from the first value and skip it in the loop
if (scores.elementsCount != 0) {
aggregateScore = scores.buffer[0];
for (int i = 1; i < scores.elementsCount; i++) {
float score = scores.buffer[i];
switch (scoreType) {
case MIN:
if (aggregateScore > score) {
aggregateScore = score;
}
break;
case MAX:
if (aggregateScore < score) {
aggregateScore = score;
}
break;
case SUM:
case AVG:
aggregateScore += score;
break;
}
}
if (scoreType == ScoreType.AVG) {
aggregateScore /= scores.elementsCount;
}
}
return aggregateScore;
}
@Override
public int freq() throws IOException {
return 0;
}
@Override
public int docID() {
return 0;
}
@Override
public int nextDoc() throws IOException {
return 0;
}
@Override
public int advance(int target) throws IOException {
return 0;
}
@Override
public long cost() {
return 0;
}
}

View File

@ -1,236 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeSet;
import static org.elasticsearch.index.query.QueryBuilders.*;
/**
 * Tests for the legacy (pre 2.x) {@code ParentConstantScoreQuery}, the constant score
 * implementation behind the has_parent query.
 */
public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
@BeforeClass
public static void before() throws IOException {
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@AfterClass
public static void after() throws IOException {
SearchContext current = SearchContext.current();
SearchContext.removeCurrent();
Releasables.close(current);
}
@Test
public void testBasicQuerySanities() {
Query parentQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")));
Query query = new ParentConstantScoreQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
QueryUtils.check(query);
}
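// has_parent counterpart of the has_child randomized tests: children are grouped under
// random parent values and the expected bit set is built from the recorded child ids.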
@Test
public void testRandom() throws Exception {
Directory directory = newDirectory();
final Random r = random();
final IndexWriterConfig iwc = LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
int numUniqueParentValues = scaledRandomIntBetween(100, 2000);
String[] parentValues = new String[numUniqueParentValues];
for (int i = 0; i < numUniqueParentValues; i++) {
parentValues[i] = Integer.toString(i);
}
int childDocId = 0;
int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
ObjectObjectHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectHashMap<>();
IntIntHashMap childIdToParentId = new IntIntHashMap();
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
boolean markParentAsDeleted = rarely();
String parentValue = parentValues[random().nextInt(parentValues.length)];
String parent = Integer.toString(parentDocId);
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
document.add(new StringField("field1", parentValue, Field.Store.NO));
if (markParentAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
int numChildDocs = scaledRandomIntBetween(0, 100);
if (parentDocId == numParentDocs - 1 && childIdToParentId.isEmpty()) {
// ensure there is at least one child in the index
numChildDocs = Math.max(1, numChildDocs);
}
for (int i = 0; i < numChildDocs; i++) {
boolean markChildAsDeleted = rarely();
boolean filterMe = rarely();
String child = Integer.toString(childDocId++);
document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", child), Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
if (markChildAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
if (filterMe) {
document.add(new StringField("filter", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
if (!markParentAsDeleted) {
NavigableSet<String> childIds;
if (parentValueToChildDocIds.containsKey(parentValue)) {
childIds = parentValueToChildDocIds.get(parentValue);
} else {
parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<>());
}
if (!markChildAsDeleted && !filterMe) {
childIdToParentId.put(Integer.valueOf(child), parentDocId);
childIds.add(child);
}
}
}
}
// Delete docs that are marked to be deleted.
indexWriter.deleteDocuments(new Term("delete", "me"));
indexWriter.commit();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher searcher = new IndexSearcher(indexReader);
Engine.Searcher engineSearcher = new Engine.Searcher(
ParentConstantScoreQuery.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueParentValues / 4;
for (int i = 0; i < max; i++) {
// Simulate a child update
if (random().nextBoolean()) {
int numberOfUpdates = childIdToParentId.isEmpty() ? 0 : scaledRandomIntBetween(1, 25);
int[] childIds = childIdToParentId.keys().toArray();
for (int j = 0; j < numberOfUpdates; j++) {
int childId = childIds[random().nextInt(childIds.length)];
String childUid = Uid.createUid("child", Integer.toString(childId));
indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, childUid));
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, childUid, Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
String parentUid = Uid.createUid("parent", Integer.toString(childIdToParentId.get(childId)));
document.add(new StringField(ParentFieldMapper.NAME, parentUid, Field.Store.NO));
indexWriter.addDocument(document);
}
indexReader.close();
indexReader = DirectoryReader.open(indexWriter.w, true);
searcher = new IndexSearcher(indexReader);
engineSearcher = new Engine.Searcher(
ParentConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
QueryBuilder queryBuilder;
if (random().nextBoolean()) {
queryBuilder = hasParentQuery("parent", termQuery("field1", parentValue));
} else {
queryBuilder = constantScoreQuery(hasParentQuery("parent", termQuery("field1", parentValue)));
}
// Using a filtered query invokes / tests Scorer#advance(..) and also keeps the Weight#scorer from getting live docs as acceptedDocs
queryBuilder = filteredQuery(queryBuilder, notQuery(termQuery("filter", "me")));
Query query = parseQuery(queryBuilder);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
searcher.search(query, collector);
FixedBitSet actualResult = collector.getResult();
FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
if (parentValueToChildDocIds.containsKey(parentValue)) {
LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
if (terms != null) {
NavigableSet<String> childIds = parentValueToChildDocIds.get(parentValue);
TermsEnum termsEnum = terms.iterator();
PostingsEnum docsEnum = null;
for (String id : childIds) {
TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(Uid.createUidAsBytes("child", id));
if (seekStatus == TermsEnum.SeekStatus.FOUND) {
docsEnum = termsEnum.postings(docsEnum, PostingsEnum.NONE);
final Bits liveDocs = slowLeafReader.getLiveDocs();
for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
}
}
expectedResult.set(docsEnum.docID());
} else if (seekStatus == TermsEnum.SeekStatus.END) {
break;
}
}
}
}
assertBitSet(actualResult, expectedResult, searcher);
}
indexWriter.close();
indexReader.close();
directory.close();
}
}

View File

@ -1,249 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Random;
import java.util.TreeMap;
import static org.elasticsearch.index.query.QueryBuilders.*;
public class ParentQueryTests extends AbstractChildTestCase {
@BeforeClass
public static void before() throws IOException {
SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
}
@AfterClass
public static void after() throws IOException {
SearchContext current = SearchContext.current();
SearchContext.removeCurrent();
Releasables.close(current);
}
@Test
public void testBasicQuerySanities() {
Query parentQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")));
Query query = new ParentQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
QueryUtils.check(query);
}
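// Scored has_parent variant: every surviving child is expected to match with its
// parent's score (fixed at 1f here), rebuilt through MockScorer with ScoreType.MAX.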
@Test
public void testRandom() throws Exception {
Directory directory = newDirectory();
final Random r = random();
final IndexWriterConfig iwc = LuceneTestCase.newIndexWriterConfig(r, new MockAnalyzer(r))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(scaledRandomIntBetween(16, 64)); // we might index a lot - don't go crazy here
RandomIndexWriter indexWriter = new RandomIndexWriter(r, directory, iwc);
int numUniqueParentValues = scaledRandomIntBetween(100, 2000);
String[] parentValues = new String[numUniqueParentValues];
for (int i = 0; i < numUniqueParentValues; i++) {
parentValues[i] = Integer.toString(i);
}
int childDocId = 0;
int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
ObjectObjectHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectHashMap<>();
IntIntHashMap childIdToParentId = new IntIntHashMap();
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
boolean markParentAsDeleted = rarely();
String parentValue = parentValues[random().nextInt(parentValues.length)];
String parent = Integer.toString(parentDocId);
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
document.add(new StringField("field1", parentValue, Field.Store.NO));
if (markParentAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
int numChildDocs = scaledRandomIntBetween(0, 100);
if (parentDocId == numParentDocs - 1 && childIdToParentId.isEmpty()) {
// ensure there is at least one child in the index
numChildDocs = Math.max(1, numChildDocs);
}
for (int i = 0; i < numChildDocs; i++) {
String child = Integer.toString(childDocId++);
boolean markChildAsDeleted = rarely();
boolean filterMe = rarely();
document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", child), Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
if (markChildAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
if (filterMe) {
document.add(new StringField("filter", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
if (!markParentAsDeleted) {
NavigableMap<String, Float> childIdToScore = parentValueToChildIds.getOrDefault(parentValue, null);
if (childIdToScore == null) {
parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<>());
}
if (!markChildAsDeleted && !filterMe) {
assertFalse("child ["+ child + "] already has a score", childIdToScore.containsKey(child));
childIdToScore.put(child, 1f);
childIdToParentId.put(Integer.valueOf(child), parentDocId);
}
}
}
}
// Delete docs that are marked to be deleted.
indexWriter.deleteDocuments(new Term("delete", "me"));
indexWriter.commit();
IndexReader indexReader = DirectoryReader.open(directory);
IndexSearcher searcher = new IndexSearcher(indexReader);
Engine.Searcher engineSearcher = new Engine.Searcher(
ParentQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueParentValues / 4;
for (int i = 0; i < max; i++) {
// Simulate a child update
if (random().nextBoolean()) {
int numberOfUpdates = childIdToParentId.isEmpty() ? 0 : scaledRandomIntBetween(1, 5);
int[] childIds = childIdToParentId.keys().toArray();
for (int j = 0; j < numberOfUpdates; j++) {
int childId = childIds[random().nextInt(childIds.length)];
String childUid = Uid.createUid("child", Integer.toString(childId));
indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, childUid));
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, childUid, Field.Store.YES));
document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
String parentUid = Uid.createUid("parent", Integer.toString(childIdToParentId.get(childId)));
document.add(new StringField(ParentFieldMapper.NAME, parentUid, Field.Store.NO));
indexWriter.addDocument(document);
}
indexReader.close();
indexReader = DirectoryReader.open(indexWriter.w, true);
searcher = new IndexSearcher(indexReader);
engineSearcher = new Engine.Searcher(
ParentConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
QueryBuilder queryBuilder = hasParentQuery("parent", constantScoreQuery(termQuery("field1", parentValue)));
// Using a filtered query invokes / tests Scorer#advance(..) and also keeps the Weight#scorer from getting live docs as acceptedDocs
queryBuilder = filteredQuery(queryBuilder, notQuery(termQuery("filter", "me")));
Query query = parseQuery(queryBuilder);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
int numHits = 1 + random().nextInt(25);
TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits);
searcher.search(query, MultiCollector.wrap(collector, actualTopDocsCollector));
FixedBitSet actualResult = collector.getResult();
FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
TopScoreDocCollector expectedTopDocsCollector = TopScoreDocCollector.create(numHits);
if (parentValueToChildIds.containsKey(parentValue)) {
LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
if (terms != null) {
NavigableMap<String, Float> childIdsAndScore = parentValueToChildIds.get(parentValue);
TermsEnum termsEnum = terms.iterator();
PostingsEnum docsEnum = null;
for (Map.Entry<String, Float> entry : childIdsAndScore.entrySet()) {
TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(Uid.createUidAsBytes("child", entry.getKey()));
if (seekStatus == TermsEnum.SeekStatus.FOUND) {
docsEnum = termsEnum.postings(docsEnum, PostingsEnum.NONE);
final Bits liveDocs = slowLeafReader.getLiveDocs();
for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
if (liveDocs == null || liveDocs.get(doc)) {
break;
}
}
expectedResult.set(docsEnum.docID());
FloatArrayList s = scores[docsEnum.docID()];
if (s == null) {
scores[docsEnum.docID()] = s = new FloatArrayList(2);
}
s.add(entry.getValue());
} else if (seekStatus == TermsEnum.SeekStatus.END) {
break;
}
}
}
MockScorer mockScorer = new MockScorer(ScoreType.MAX);
mockScorer.scores = new FloatArrayList();
final LeafCollector leafCollector = expectedTopDocsCollector.getLeafCollector(slowLeafReader.getContext());
leafCollector.setScorer(mockScorer);
for (int doc = expectedResult.nextSetBit(0); doc < slowLeafReader.maxDoc(); doc = doc + 1 >= expectedResult.length() ? DocIdSetIterator.NO_MORE_DOCS : expectedResult.nextSetBit(doc + 1)) {
mockScorer.scores.clear();
mockScorer.scores.addAll(scores[doc]);
leafCollector.collect(doc);
}
}
assertBitSet(actualResult, expectedResult, searcher);
assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
}
indexWriter.close();
indexReader.close();
directory.close();
}
}

View File

@ -1,223 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.child;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.junit.Test;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.startsWith;
/**
 * Backwards compatibility variant of {@link ChildQuerySearchIT}: the index is created as
 * version 1.6.0 so the legacy (pre 2.x) parent/child implementation is exercised.
 */
@ClusterScope(scope = Scope.SUITE)
public class ChildQuerySearchBwcIT extends ChildQuerySearchIT {
@Override
public Settings indexSettings() {
return settings(Version.V_1_6_0).put(super.indexSettings()).build();
}
public void testSelfReferentialIsForbidden() {
// we allowed this, but it was actually broken. The has_child/has_parent results were sometimes wrong...
assertAcked(prepareCreate("test").addMapping("type", "_parent", "type=type"));
}
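// Indexes a parent first, adds the child type's _parent mapping via the put mapping API,
// then indexes a child; has_child and has_parent must still resolve the association.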
@Test
public void testAdd_ParentFieldAfterIndexingParentDocButBeforeIndexingChildDoc() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.refresh_interval", -1)));
ensureGreen();
String parentId = "p1";
client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get();
refresh();
assertAcked(client().admin()
.indices()
.preparePutMapping("test")
.setType("child")
.setSource("_parent", "type=parent"));
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get();
client().admin().indices().prepareRefresh().get();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(hasChildQuery("child", termQuery("c_field", "1")))
.get();
assertHitCount(searchResponse, 1l);
assertSearchHits(searchResponse, parentId);
searchResponse = client().prepareSearch("test")
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.get();
assertHitCount(searchResponse, 1l);
assertSearchHits(searchResponse, parentId);
searchResponse = client().prepareSearch("test")
.setPostFilter(hasChildQuery("child", termQuery("c_field", "1")))
.get();
assertHitCount(searchResponse, 1l);
assertSearchHits(searchResponse, parentId);
searchResponse = client().prepareSearch("test")
.setPostFilter(hasParentQuery("parent", termQuery("p_field", "1")))
.get();
assertHitCount(searchResponse, 1l);
assertSearchHits(searchResponse, "c1");
searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score"))
.get();
assertHitCount(searchResponse, 1l);
assertSearchHits(searchResponse, "c1");
}
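// The legacy queries only produce placeholder explanations, hence the literal
// "not implemented yet..." descriptions asserted below.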
@Test
public void testExplainUsage() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
ensureGreen();
String parentId = "p1";
client().prepareIndex("test", "parent", parentId).setSource("p_field", "1").get();
client().prepareIndex("test", "child", "c1").setSource("c_field", "1").setParent(parentId).get();
refresh();
SearchResponse searchResponse = client().prepareSearch("test")
.setExplain(true)
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.get();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
searchResponse = client().prepareSearch("test")
.setExplain(true)
.setQuery(hasParentQuery("parent", termQuery("p_field", "1")).scoreType("score"))
.get();
assertHitCount(searchResponse, 1l);
assertThat(searchResponse.getHits().getAt(0).explanation().getDescription(), equalTo("not implemented yet..."));
ExplainResponse explainResponse = client().prepareExplain("test", "parent", parentId)
.setQuery(hasChildQuery("child", termQuery("c_field", "1")).scoreType("max"))
.get();
assertThat(explainResponse.isExists(), equalTo(true));
// TODO: improve test once explanations are actually implemented
assertThat(explainResponse.getExplanation().toString(), startsWith("1.0 ="));
}
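// Regression test: with automatic refresh disabled the _parent field data must stay
// empty until children are mapped and indexed, and clearing the field data cache must
// bring its memory usage back down to 0.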
@Test
public void testParentFieldDataCacheBug() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder().put(indexSettings())
.put("index.refresh_interval", -1)) // Disable automatic refresh, so that the _parent doesn't get warmed
.addMapping("parent", jsonBuilder().startObject().startObject("parent")
.startObject("properties")
.startObject("p_field")
.field("type", "string")
.startObject("fielddata")
.field(FieldDataType.FORMAT_KEY, MappedFieldType.Loading.LAZY)
.endObject()
.endObject()
.endObject().endObject().endObject()));
ensureGreen();
client().prepareIndex("test", "parent", "p0").setSource("p_field", "p_value0").get();
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
refresh();
// No _parent field yet, there shouldn't be anything in the field data for _parent field
IndicesStatsResponse indicesStatsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).get();
assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
// Now add mapping + children
client().admin().indices().preparePutMapping("test").setType("child")
.setSource(XContentFactory.jsonBuilder().startObject().startObject("child")
.startObject("_parent")
.field("type", "parent")
.endObject()
.startObject("properties")
.startObject("c_field")
.field("type", "string")
.startObject("fielddata")
.field(FieldDataType.FORMAT_KEY, MappedFieldType.Loading.LAZY)
.endObject()
.endObject()
.endObject().endObject().endObject())
.get();
// index simple data
client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get();
client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get();
client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get();
client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get();
client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get();
refresh();
indicesStatsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).setFieldDataFields("_parent").get();
assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
assertThat(indicesStatsResponse.getTotal().getFieldData().getFields().get("_parent"), greaterThan(0l));
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"))))
.get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
indicesStatsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).setFieldDataFields("_parent").get();
assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
assertThat(indicesStatsResponse.getTotal().getFieldData().getFields().get("_parent"), greaterThan(0l));
ClearIndicesCacheResponse clearCacheResponse = client().admin().indices().prepareClearCache("test").setFieldDataCache(true).get();
assertNoFailures(clearCacheResponse);
assertAllSuccessful(clearCacheResponse);
indicesStatsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).setFieldDataFields("_parent").get();
assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
assertThat(indicesStatsResponse.getTotal().getFieldData().getFields().get("_parent"), equalTo(0l));
}
}

View File

@@ -30,12 +30,11 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.cache.IndexCacheModule;
-import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.search.child.ScoreType;
+import org.elasticsearch.index.query.ScoreType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@@ -1573,7 +1572,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
return indexBuilders;
}
-private SearchResponse minMaxQuery(String scoreType, int minChildren, int maxChildren, int cutoff) throws SearchPhaseExecutionException {
+private SearchResponse minMaxQuery(String scoreType, int minChildren, int maxChildren) throws SearchPhaseExecutionException {
return client()
.prepareSearch("test")
.setQuery(
@@ -1584,16 +1583,16 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
.add(QueryBuilders.matchAllQuery(), weightFactorFunction(1))
.add(QueryBuilders.termQuery("foo", "three"), weightFactorFunction(1))
.add(QueryBuilders.termQuery("foo", "four"), weightFactorFunction(1))).scoreType(scoreType)
-.minChildren(minChildren).maxChildren(maxChildren).setShortCircuitCutoff(cutoff))
+.minChildren(minChildren).maxChildren(maxChildren))
.addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get();
}
-private SearchResponse minMaxFilter(int minChildren, int maxChildren, int cutoff) throws SearchPhaseExecutionException {
+private SearchResponse minMaxFilter(int minChildren, int maxChildren) throws SearchPhaseExecutionException {
return client()
.prepareSearch("test")
.setQuery(
QueryBuilders.constantScoreQuery(QueryBuilders.hasChildQuery("child", termQuery("foo", "two"))
-.minChildren(minChildren).maxChildren(maxChildren).setShortCircuitCutoff(cutoff)))
+.minChildren(minChildren).maxChildren(maxChildren)))
.addSort("id", SortOrder.ASC).setTrackScores(true).get();
}
@@ -1606,10 +1605,9 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
indexRandom(true, createMinMaxDocBuilders().toArray(new IndexRequestBuilder[0]));
SearchResponse response;
-int cutoff = getRandom().nextInt(4);
// Score mode = NONE
-response = minMaxQuery("none", 0, 0, cutoff);
+response = minMaxQuery("none", 0, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1619,7 +1617,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("none", 1, 0, cutoff);
+response = minMaxQuery("none", 1, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1629,7 +1627,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("none", 2, 0, cutoff);
+response = minMaxQuery("none", 2, 0);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@@ -1637,17 +1635,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("4"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxQuery("none", 3, 0, cutoff);
+response = minMaxQuery("none", 3, 0);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
-response = minMaxQuery("none", 4, 0, cutoff);
+response = minMaxQuery("none", 4, 0);
assertThat(response.getHits().totalHits(), equalTo(0l));
-response = minMaxQuery("none", 0, 4, cutoff);
+response = minMaxQuery("none", 0, 4);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1657,7 +1655,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("none", 0, 3, cutoff);
+response = minMaxQuery("none", 0, 3);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1667,7 +1665,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("none", 0, 2, cutoff);
+response = minMaxQuery("none", 0, 2);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1675,21 +1673,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxQuery("none", 2, 2, cutoff);
+response = minMaxQuery("none", 2, 2);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
try {
-response = minMaxQuery("none", 3, 2, cutoff);
+response = minMaxQuery("none", 3, 2);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}
// Score mode = SUM
-response = minMaxQuery("sum", 0, 0, cutoff);
+response = minMaxQuery("sum", 0, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1699,7 +1697,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("sum", 1, 0, cutoff);
+response = minMaxQuery("sum", 1, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1709,7 +1707,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("sum", 2, 0, cutoff);
+response = minMaxQuery("sum", 2, 0);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1717,17 +1715,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(3f));
-response = minMaxQuery("sum", 3, 0, cutoff);
+response = minMaxQuery("sum", 3, 0);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(6f));
-response = minMaxQuery("sum", 4, 0, cutoff);
+response = minMaxQuery("sum", 4, 0);
assertThat(response.getHits().totalHits(), equalTo(0l));
-response = minMaxQuery("sum", 0, 4, cutoff);
+response = minMaxQuery("sum", 0, 4);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1737,7 +1735,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("sum", 0, 3, cutoff);
+response = minMaxQuery("sum", 0, 3);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1747,7 +1745,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("sum", 0, 2, cutoff);
+response = minMaxQuery("sum", 0, 2);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@@ -1755,21 +1753,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxQuery("sum", 2, 2, cutoff);
+response = minMaxQuery("sum", 2, 2);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
try {
-response = minMaxQuery("sum", 3, 2, cutoff);
+response = minMaxQuery("sum", 3, 2);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}
// Score mode = MAX
-response = minMaxQuery("max", 0, 0, cutoff);
+response = minMaxQuery("max", 0, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1779,7 +1777,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("max", 1, 0, cutoff);
+response = minMaxQuery("max", 1, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1789,7 +1787,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("max", 2, 0, cutoff);
+response = minMaxQuery("max", 2, 0);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1797,17 +1795,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(2f));
-response = minMaxQuery("max", 3, 0, cutoff);
+response = minMaxQuery("max", 3, 0);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(3f));
-response = minMaxQuery("max", 4, 0, cutoff);
+response = minMaxQuery("max", 4, 0);
assertThat(response.getHits().totalHits(), equalTo(0l));
-response = minMaxQuery("max", 0, 4, cutoff);
+response = minMaxQuery("max", 0, 4);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1817,7 +1815,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("max", 0, 3, cutoff);
+response = minMaxQuery("max", 0, 3);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1827,7 +1825,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("max", 0, 2, cutoff);
+response = minMaxQuery("max", 0, 2);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@@ -1835,21 +1833,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxQuery("max", 2, 2, cutoff);
+response = minMaxQuery("max", 2, 2);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
try {
-response = minMaxQuery("max", 3, 2, cutoff);
+response = minMaxQuery("max", 3, 2);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}
// Score mode = AVG
-response = minMaxQuery("avg", 0, 0, cutoff);
+response = minMaxQuery("avg", 0, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1859,7 +1857,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("avg", 1, 0, cutoff);
+response = minMaxQuery("avg", 1, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1869,7 +1867,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("avg", 2, 0, cutoff);
+response = minMaxQuery("avg", 2, 0);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1877,17 +1875,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(1.5f));
-response = minMaxQuery("avg", 3, 0, cutoff);
+response = minMaxQuery("avg", 3, 0);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(2f));
-response = minMaxQuery("avg", 4, 0, cutoff);
+response = minMaxQuery("avg", 4, 0);
assertThat(response.getHits().totalHits(), equalTo(0l));
-response = minMaxQuery("avg", 0, 4, cutoff);
+response = minMaxQuery("avg", 0, 4);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1897,7 +1895,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("avg", 0, 3, cutoff);
+response = minMaxQuery("avg", 0, 3);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
@@ -1907,7 +1905,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("2"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxQuery("avg", 0, 2, cutoff);
+response = minMaxQuery("avg", 0, 2);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@@ -1915,21 +1913,21 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("2"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxQuery("avg", 2, 2, cutoff);
+response = minMaxQuery("avg", 2, 2);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1.5f));
try {
-response = minMaxQuery("avg", 3, 2, cutoff);
+response = minMaxQuery("avg", 3, 2);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
}
// HasChildFilter
-response = minMaxFilter(0, 0, cutoff);
+response = minMaxFilter(0, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1939,7 +1937,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxFilter(1, 0, cutoff);
+response = minMaxFilter(1, 0);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1949,7 +1947,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxFilter(2, 0, cutoff);
+response = minMaxFilter(2, 0);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
@@ -1957,17 +1955,17 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("4"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxFilter(3, 0, cutoff);
+response = minMaxFilter(3, 0);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("4"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
-response = minMaxFilter(4, 0, cutoff);
+response = minMaxFilter(4, 0);
assertThat(response.getHits().totalHits(), equalTo(0l));
-response = minMaxFilter(0, 4, cutoff);
+response = minMaxFilter(0, 4);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1977,7 +1975,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxFilter(0, 3, cutoff);
+response = minMaxFilter(0, 3);
assertThat(response.getHits().totalHits(), equalTo(3l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1987,7 +1985,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[2].id(), equalTo("4"));
assertThat(response.getHits().hits()[2].score(), equalTo(1f));
-response = minMaxFilter(0, 2, cutoff);
+response = minMaxFilter(0, 2);
assertThat(response.getHits().totalHits(), equalTo(2l));
assertThat(response.getHits().hits()[0].id(), equalTo("2"));
@@ -1995,14 +1993,14 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().hits()[1].id(), equalTo("3"));
assertThat(response.getHits().hits()[1].score(), equalTo(1f));
-response = minMaxFilter(2, 2, cutoff);
+response = minMaxFilter(2, 2);
assertThat(response.getHits().totalHits(), equalTo(1l));
assertThat(response.getHits().hits()[0].id(), equalTo("3"));
assertThat(response.getHits().hits()[0].score(), equalTo(1f));
try {
-response = minMaxFilter(3, 2, cutoff);
+response = minMaxFilter(3, 2);
fail();
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString(), containsString("[has_child] 'max_children' is less than 'min_children'"));
@@ -2033,7 +2031,6 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
static HasChildQueryBuilder hasChildQuery(String type, QueryBuilder queryBuilder) {
HasChildQueryBuilder hasChildQueryBuilder = QueryBuilders.hasChildQuery(type, queryBuilder);
-hasChildQueryBuilder.setShortCircuitCutoff(randomInt(10));
return hasChildQueryBuilder;
}
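For context on the two helpers above, a minimal sketch of what a `has_child` query looks like once the short-circuit cutoff is gone — only the string-based score type and the `minChildren`/`maxChildren` bounds remain on the builder. Index, type, and field names are illustrative, not taken from this commit:

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;

public class HasChildQuerySketch {

    // Finds parents that have between one and three children matching the term query.
    // The setShortCircuitCutoff(...) tuning knob removed by this commit no longer exists.
    public static SearchResponse parentsWithBlueChildren(Client client) {
        return client.prepareSearch("test")
                .setQuery(QueryBuilders.hasChildQuery("child", QueryBuilders.termQuery("c_field", "blue"))
                        .scoreType("max")   // e.g. "none", "max", "sum", "avg", as exercised in the tests above
                        .minChildren(1)
                        .maxChildren(3))
                .get();
    }
}
```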

View File

@@ -1,249 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.child;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.MergePolicyConfig;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.child.ChildQuerySearchIT.hasChildQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
public class ParentFieldLoadingBwcIT extends ESIntegTestCase {
private final Settings indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put(IndexShard.INDEX_REFRESH_INTERVAL, -1)
// Merges are disabled in this test to ensure we still have two segments for the last validation
.put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_6_0)
.build();
public void testParentFieldDataCacheBug() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder().put(indexSettings)
.put("index.refresh_interval", -1)) // Disable automatic refresh, so that the _parent doesn't get warmed
.addMapping("parent", XContentFactory.jsonBuilder().startObject().startObject("parent")
.startObject("properties")
.startObject("p_field")
.field("type", "string")
.startObject("fielddata")
.field(FieldDataType.FORMAT_KEY, MappedFieldType.Loading.LAZY)
.endObject()
.endObject()
.endObject().endObject().endObject())
.addMapping("child", XContentFactory.jsonBuilder().startObject().startObject("child")
.startObject("_parent")
.field("type", "parent")
.endObject()
.startObject("properties")
.startObject("c_field")
.field("type", "string")
.startObject("fielddata")
.field(FieldDataType.FORMAT_KEY, MappedFieldType.Loading.LAZY)
.endObject()
.endObject()
.endObject().endObject().endObject()));
ensureGreen();
client().prepareIndex("test", "parent", "p0").setSource("p_field", "p_value0").get();
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get();
client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get();
client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get();
client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get();
client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get();
refresh();
IndicesStatsResponse statsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).setFieldDataFields("_parent").get();
assertThat(statsResponse.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
assertThat(statsResponse.getTotal().getFieldData().getFields().get("_parent"), greaterThan(0l));
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"))))
.get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
statsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).setFieldDataFields("_parent").get();
assertThat(statsResponse.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
assertThat(statsResponse.getTotal().getFieldData().getFields().get("_parent"), greaterThan(0l));
ClearIndicesCacheResponse clearCacheResponse = client().admin().indices().prepareClearCache("test").setFieldDataCache(true).get();
assertNoFailures(clearCacheResponse);
assertAllSuccessful(clearCacheResponse);
statsResponse = client().admin().indices()
.prepareStats("test").setFieldData(true).setFieldDataFields("_parent").get();
assertThat(statsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
assertThat(statsResponse.getTotal().getFieldData().getFields().get("_parent"), equalTo(0l));
}
public void testEagerParentFieldLoading() throws Exception {
logger.info("testing lazy loading...");
assertAcked(prepareCreate("test")
.setSettings(indexSettings)
.addMapping("parent")
.addMapping("child", childMapping(MappedFieldType.Loading.LAZY)));
ensureGreen();
client().prepareIndex("test", "parent", "1").setSource("{}").get();
client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get();
refresh();
IndicesStatsResponse r = client().admin().indices().prepareStats("test").setFieldData(true).setFieldDataFields("*").get();
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(0l));
logger.info("testing default loading...");
assertAcked(client().admin().indices().prepareDelete("test").get());
assertAcked(prepareCreate("test")
.setSettings(indexSettings)
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
ensureGreen();
client().prepareIndex("test", "parent", "1").setSource("{}").get();
client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get();
refresh();
response = client().admin().cluster().prepareClusterStats().get();
long fielddataSizeDefault = response.getIndicesStats().getFieldData().getMemorySizeInBytes();
assertThat(fielddataSizeDefault, greaterThan(0l));
logger.info("testing eager loading...");
assertAcked(client().admin().indices().prepareDelete("test").get());
assertAcked(prepareCreate("test")
.setSettings(indexSettings)
.addMapping("parent")
.addMapping("child", childMapping(MappedFieldType.Loading.EAGER)));
ensureGreen();
client().prepareIndex("test", "parent", "1").setSource("{}").get();
client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get();
refresh();
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), equalTo(fielddataSizeDefault));
logger.info("testing eager global ordinals loading...");
assertAcked(client().admin().indices().prepareDelete("test").get());
assertAcked(prepareCreate("test")
.setSettings(indexSettings)
.addMapping("parent")
.addMapping("child", childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS)));
ensureGreen();
// Need to do 2 separate refreshes, otherwise we have 1 segment and we can't tell from the size of the field
// data cache whether global ordinals are loaded, because global ordinals take no extra memory on single-segment shards.
client().prepareIndex("test", "parent", "1").setSource("{}").get();
refresh();
client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get();
refresh();
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(fielddataSizeDefault));
}
public void testChangingEagerParentFieldLoadingAtRuntime() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(indexSettings)
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
ensureGreen();
client().prepareIndex("test", "parent", "1").setSource("{}").get();
client().prepareIndex("test", "child", "1").setParent("1").setSource("{}").get();
refresh();
ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get();
long fielddataSizeDefault = response.getIndicesStats().getFieldData().getMemorySizeInBytes();
assertThat(fielddataSizeDefault, greaterThan(0l));
PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child")
.setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))
.setUpdateAllTypes(true)
.get();
assertAcked(putMappingResponse);
assertBusy(new Runnable() {
@Override
public void run() {
ClusterState clusterState = internalCluster().clusterService().state();
ShardRouting shardRouting = clusterState.routingTable().index("test").shard(0).getShards().get(0);
String nodeName = clusterState.getNodes().get(shardRouting.currentNodeId()).getName();
boolean verified = false;
IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeName);
IndexService indexService = indicesService.indexService("test");
if (indexService != null) {
MapperService mapperService = indexService.mapperService();
DocumentMapper documentMapper = mapperService.documentMapper("child");
if (documentMapper != null) {
verified = documentMapper.parentFieldMapper().getChildJoinFieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS;
}
}
assertTrue(verified);
}
});
// Need to add a new doc, otherwise the refresh doesn't trigger a new searcher.
// The doc ends up in its own segment, but since it isn't of type parent or child it doesn't contribute to the size of the fielddata cache.
client().prepareIndex("test", "dummy", "dummy").setSource("{}").get();
refresh();
response = client().admin().cluster().prepareClusterStats().get();
assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(fielddataSizeDefault));
}
private XContentBuilder childMapping(MappedFieldType.Loading loading) throws IOException {
return jsonBuilder().startObject().startObject("child").startObject("_parent")
.field("type", "parent")
.startObject("fielddata").field(MappedFieldType.Loading.KEY, loading).endObject()
.endObject().endObject().endObject();
}
}

View File

@@ -63,3 +63,12 @@
Scroll requests sorted by `_doc` have been optimized to more efficiently resume
from where the previous request stopped, so this will have the same performance
characteristics as the former `scan` search type.
=== Parent/Child changes

The `children` aggregation, parent/child inner hits and the `has_child` and `has_parent` queries will not work on indices
with a `_parent` field mapping created before version `2.0.0`. The data of these indices needs to be re-indexed into a new index.

The format of the join between parent and child documents has changed with the `2.0.0` release. The old
format can no longer be read from version `3.0.0` onwards. The new format allows for a much more efficient and
scalable join between parent and child documents: the join data structures are now stored in on-disk data
structures, whereas previously they were kept in the JVM heap.
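To illustrate the re-index requirement above, a minimal scroll-and-bulk sketch against the Java client of this era is shown below. The index names, page size, and the absence of error handling are illustrative assumptions, not part of this commit; the `_doc` sort ties in with the scroll optimization noted above:

```java
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortOrder;

public class ParentChildReindexSketch {

    // Copies every document from "old_index" (pre-2.0 _parent format) into "new_index",
    // preserving the parent/child link via setParent(...), so that the join data is
    // rewritten in the new on-disk format on the target index.
    public static void reindex(Client client) {
        SearchResponse scroll = client.prepareSearch("old_index")
                .addSort("_doc", SortOrder.ASC) // cheap resumable scroll, per the note above
                .setScroll(TimeValue.timeValueMinutes(1))
                .setSize(100)
                .addField("_parent") // fetch the parent id of child documents
                .setFetchSource(true)
                .get();
        while (scroll.getHits().getHits().length > 0) {
            BulkRequestBuilder bulk = client.prepareBulk();
            for (SearchHit hit : scroll.getHits()) {
                IndexRequestBuilder indexRequest = client.prepareIndex("new_index", hit.getType(), hit.getId())
                        .setSource(hit.getSourceAsString());
                if (hit.field("_parent") != null) {
                    String parentId = hit.field("_parent").getValue();
                    indexRequest.setParent(parentId); // re-establish the parent/child link
                }
                bulk.add(indexRequest);
            }
            bulk.get();
            scroll = client.prepareSearchScroll(scroll.getScrollId())
                    .setScroll(TimeValue.timeValueMinutes(1))
                    .get();
        }
        client.prepareClearScroll().addScrollId(scroll.getScrollId()).get(); // release the scroll context
    }
}
```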