Cut p/c queries (has_child and has_parent queries) over to use global ordinals instead of being based on bytes values.

Closes #5846
Martijn van Groningen 2014-04-16 02:23:26 +07:00
parent fc3efda6af
commit 0f23485a3c
12 changed files with 1028 additions and 859 deletions
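
For orientation, the following is a minimal, self-contained Java sketch of the ordinal-based matching this change introduces. It uses plain JDK collections in place of the Lucene/Elasticsearch types in the diff (LongBitSet, LongHash, Ordinals.Docs, the global ordinals builders); the class and method names (ToyGlobalOrdinals, globalOrd, valueByOrd) are illustrative only and do not appear in the change.

import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Toy model of the idea behind this commit: parent ids are mapped once to dense,
// index-wide ("global") ordinals, so per-document matching becomes a bit test on a
// long ordinal instead of a hash lookup on the parent id bytes.
public class ToyGlobalOrdinals {

    private final Map<String, Integer> idToOrd = new HashMap<>();
    private final List<String> ordToId = new ArrayList<>();

    // Assigns (or looks up) the global ordinal for a parent id.
    int globalOrd(String parentId) {
        Integer ord = idToOrd.get(parentId);
        if (ord == null) {
            ord = ordToId.size();
            idToOrd.put(parentId, ord);
            ordToId.add(parentId);
        }
        return ord;
    }

    // Resolves a global ordinal back to the parent id (what getValueByOrd does in the diff).
    String valueByOrd(int ord) {
        return ordToId.get(ord);
    }

    public static void main(String[] args) {
        ToyGlobalOrdinals ordinals = new ToyGlobalOrdinals();

        // Child documents that matched the child query, each carrying its parent id.
        String[] matchingChildren = {"p1", "p3", "p1", "p2", "p3"};

        // Collector phase (cf. ParentOrdCollector): mark each parent's global ordinal.
        BitSet matchedParentOrds = new BitSet();
        for (String parentId : matchingChildren) {
            matchedParentOrds.set(ordinals.globalOrd(parentId));
        }

        // Scorer phase (cf. ParentOrdIterator): a parent doc is accepted iff its ordinal is marked.
        String[] parentDocs = {"p1", "p2", "p4"};
        for (String parentId : parentDocs) {
            int ord = ordinals.globalOrd(parentId);
            boolean matches = matchedParentOrds.get(ord);
            System.out.println(ordinals.valueByOrd(ord) + " (ord=" + ord + ") has matching children: " + matches);
        }
    }
}

In the actual change, the mapping from per-segment to global ordinals is built up front (InternalGlobalOrdinalsBuilder / InternalGlobalOrdinalsIndexFieldData), so ParentOrdCollector, ParentOrdIterator and the scorers only ever touch long ordinals, a LongBitSet and a LongHash, instead of per-document BytesRefHash lookups on the parent uid.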


@ -20,45 +20,33 @@ package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.index.fielddata.ordinals.InternalGlobalOrdinalsBuilder.OrdinalMappingSource;
import org.elasticsearch.index.fielddata.plain.AtomicFieldDataWithOrdinalsTermsEnum;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.RamUsage;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.search.MultiValueMode;
/**
* {@link IndexFieldData} impl based on global ordinals.
* {@link IndexFieldData} base class for concrete global ordinals implementations.
*/
public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexFieldData.WithOrdinals, RamUsage {
public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexFieldData.WithOrdinals, RamUsage {
private final FieldMapper.Names fieldNames;
private final FieldDataType fieldDataType;
private final Atomic[] atomicReaders;
private final long memorySizeInBytes;
public GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicFieldData.WithOrdinals[] segmentAfd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource[] segmentOrdToGlobalOrds, long memorySizeInBytes) {
protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) {
super(index, settings);
this.fieldNames = fieldNames;
this.fieldDataType = fieldDataType;
this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) {
atomicReaders[i] = new Atomic(segmentAfd[i], globalOrdToFirstSegment, globalOrdToFirstSegmentDelta, segmentOrdToGlobalOrds[i]);
}
this.memorySizeInBytes = memorySizeInBytes;
}
@Override
public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
return atomicReaders[context.ord];
}
@Override
public AtomicFieldData.WithOrdinals loadDirect(AtomicReaderContext context) throws Exception {
return load(context);
@ -109,86 +97,4 @@ public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent i
return memorySizeInBytes;
}
private final class Atomic implements AtomicFieldData.WithOrdinals {
private final AtomicFieldData.WithOrdinals afd;
private final OrdinalMappingSource segmentOrdToGlobalOrdLookup;
private final LongValues globalOrdToFirstSegment;
private final LongValues globalOrdToFirstSegmentDelta;
private Atomic(WithOrdinals afd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource segmentOrdToGlobalOrdLookup) {
this.afd = afd;
this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
this.globalOrdToFirstSegment = globalOrdToFirstSegment;
this.globalOrdToFirstSegmentDelta = globalOrdToFirstSegmentDelta;
}
@Override
public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
BytesValues.WithOrdinals values = afd.getBytesValues(false);
Ordinals.Docs segmentOrdinals = values.ordinals();
final Ordinals.Docs globalOrdinals;
if (segmentOrdToGlobalOrdLookup != null) {
globalOrdinals = segmentOrdToGlobalOrdLookup.globalOrdinals(segmentOrdinals);
} else {
globalOrdinals = segmentOrdinals;
}
final BytesValues.WithOrdinals[] bytesValues = new BytesValues.WithOrdinals[atomicReaders.length];
for (int i = 0; i < bytesValues.length; i++) {
bytesValues[i] = atomicReaders[i].afd.getBytesValues(false);
}
return new BytesValues.WithOrdinals(globalOrdinals) {
int readerIndex;
@Override
public BytesRef getValueByOrd(long globalOrd) {
final long segmentOrd = globalOrd - globalOrdToFirstSegmentDelta.get(globalOrd);
readerIndex = (int) globalOrdToFirstSegment.get(globalOrd);
return bytesValues[readerIndex].getValueByOrd(segmentOrd);
}
@Override
public BytesRef copyShared() {
return bytesValues[readerIndex].copyShared();
}
@Override
public int currentValueHash() {
return bytesValues[readerIndex].currentValueHash();
}
};
}
@Override
public boolean isMultiValued() {
return afd.isMultiValued();
}
@Override
public long getNumberUniqueValues() {
return afd.getNumberUniqueValues();
}
@Override
public long getMemorySizeInBytes() {
return afd.getMemorySizeInBytes();
}
@Override
public ScriptDocValues getScriptValues() {
throw new UnsupportedOperationException("Script values not supported on global ordinals");
}
@Override
public TermsEnum getTermsEnum() {
return new AtomicFieldDataWithOrdinalsTermsEnum(this);
}
@Override
public void close() {
}
}
}


@ -112,7 +112,7 @@ public class InternalGlobalOrdinalsBuilder extends AbstractIndexComponent implem
(System.currentTimeMillis() - startTime)
);
}
return new GlobalOrdinalsIndexFieldData(indexFieldData.index(), settings, indexFieldData.getFieldNames(),
return new InternalGlobalOrdinalsIndexFieldData(indexFieldData.index(), settings, indexFieldData.getFieldNames(),
fieldDataType, withOrdinals, globalOrdToFirstSegment, globalOrdToFirstSegmentDelta,
segmentOrdToGlobalOrdLookups, memorySizeInBytes
);


@ -0,0 +1,137 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.ordinals.InternalGlobalOrdinalsBuilder.OrdinalMappingSource;
import org.elasticsearch.index.fielddata.plain.AtomicFieldDataWithOrdinalsTermsEnum;
import org.elasticsearch.index.mapper.FieldMapper;
/**
* {@link org.elasticsearch.index.fielddata.IndexFieldData} impl based on global ordinals.
*/
final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData {
private final Atomic[] atomicReaders;
InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicFieldData.WithOrdinals[] segmentAfd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource[] segmentOrdToGlobalOrds, long memorySizeInBytes) {
super(index, settings, fieldNames, fieldDataType, memorySizeInBytes);
this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) {
atomicReaders[i] = new Atomic(segmentAfd[i], globalOrdToFirstSegment, globalOrdToFirstSegmentDelta, segmentOrdToGlobalOrds[i]);
}
}
@Override
public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
return atomicReaders[context.ord];
}
private final class Atomic implements AtomicFieldData.WithOrdinals {
private final WithOrdinals afd;
private final OrdinalMappingSource segmentOrdToGlobalOrdLookup;
private final LongValues globalOrdToFirstSegment;
private final LongValues globalOrdToFirstSegmentDelta;
private Atomic(WithOrdinals afd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource segmentOrdToGlobalOrdLookup) {
this.afd = afd;
this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
this.globalOrdToFirstSegment = globalOrdToFirstSegment;
this.globalOrdToFirstSegmentDelta = globalOrdToFirstSegmentDelta;
}
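// Exposes the segment's values under the global ordinal space: per-doc ordinals are mapped
// segment -> global, and lookups by global ordinal are resolved back to the owning segment
// (globalOrdToFirstSegment) and its local ordinal (global ord minus the per-ord delta).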
@Override
public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
BytesValues.WithOrdinals values = afd.getBytesValues(false);
Ordinals.Docs segmentOrdinals = values.ordinals();
final Ordinals.Docs globalOrdinals;
if (segmentOrdToGlobalOrdLookup != null) {
globalOrdinals = segmentOrdToGlobalOrdLookup.globalOrdinals(segmentOrdinals);
} else {
globalOrdinals = segmentOrdinals;
}
final BytesValues.WithOrdinals[] bytesValues = new BytesValues.WithOrdinals[atomicReaders.length];
for (int i = 0; i < bytesValues.length; i++) {
bytesValues[i] = atomicReaders[i].afd.getBytesValues(false);
}
return new BytesValues.WithOrdinals(globalOrdinals) {
int readerIndex;
@Override
public BytesRef getValueByOrd(long globalOrd) {
final long segmentOrd = globalOrd - globalOrdToFirstSegmentDelta.get(globalOrd);
readerIndex = (int) globalOrdToFirstSegment.get(globalOrd);
return bytesValues[readerIndex].getValueByOrd(segmentOrd);
}
@Override
public BytesRef copyShared() {
return bytesValues[readerIndex].copyShared();
}
@Override
public int currentValueHash() {
return bytesValues[readerIndex].currentValueHash();
}
};
}
@Override
public boolean isMultiValued() {
return afd.isMultiValued();
}
@Override
public long getNumberUniqueValues() {
return afd.getNumberUniqueValues();
}
@Override
public long getMemorySizeInBytes() {
return afd.getMemorySizeInBytes();
}
@Override
public ScriptDocValues getScriptValues() {
throw new UnsupportedOperationException("Script values not supported on global ordinals");
}
@Override
public TermsEnum getTermsEnum() {
return new AtomicFieldDataWithOrdinalsTermsEnum(this);
}
@Override
public void close() {
}
}
}


@ -122,6 +122,10 @@ public class ParentChildAtomicFieldData implements AtomicFieldData {
}
}
public WithOrdinals getAtomicFieldData(String type) {
return typeToIds.get(type);
}
@Override
public ScriptDocValues getScriptValues() {
return new ScriptDocValues.Strings(getBytesValues(false));


@ -25,6 +25,8 @@ import org.apache.lucene.index.*;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PagedBytes;
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.MemoryCircuitBreaker;
import org.elasticsearch.common.collect.ImmutableOpenMap;
@ -32,8 +34,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -44,6 +46,7 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.NavigableSet;
@ -57,6 +60,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChil
private final NavigableSet<BytesRef> parentTypes;
private final CircuitBreakerService breakerService;
private final GlobalOrdinalsBuilder globalOrdinalsBuilder;
// If a child type (a type with a _parent field) is added or removed, we want to make sure modifications don't happen
// while loading.
@ -64,10 +68,11 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChil
public ParentChildIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
CircuitBreakerService breakerService) {
CircuitBreakerService breakerService, GlobalOrdinalsBuilder globalOrdinalsBuilder) {
super(index, indexSettings, fieldNames, fieldDataType, cache);
parentTypes = new TreeSet<>(BytesRef.getUTF8SortedAsUnicodeComparator());
this.breakerService = breakerService;
this.globalOrdinalsBuilder = globalOrdinalsBuilder;
for (DocumentMapper documentMapper : mapperService) {
beforeCreate(documentMapper);
}
@ -155,6 +160,12 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChil
}
}
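// Builds (or loads from the field data cache) index-wide global ordinals for all parent types
// and returns the per-type view that the p/c queries consume.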
public WithOrdinals getGlobalParentChild(String type, IndexReader indexReader) {
ParentTypesGlobalOrdinalsLoading loading = new ParentTypesGlobalOrdinalsLoading();
ParentChildGlobalOrdinalsIndexFieldData holder = (ParentChildGlobalOrdinalsIndexFieldData) loading.loadGlobal(indexReader);
return holder.type(type);
}
@Override
public void beforeCreate(DocumentMapper mapper) {
synchronized (lock) {
@ -198,7 +209,8 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChil
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper,
IndexFieldDataCache cache, CircuitBreakerService breakerService,
MapperService mapperService, GlobalOrdinalsBuilder globalOrdinalBuilder) {
return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, mapperService, breakerService);
return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache,
mapperService, breakerService, globalOrdinalBuilder);
}
}
@ -251,4 +263,144 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChil
}
}
private class ParentTypesGlobalOrdinalsLoading implements WithOrdinals {
public ParentTypesGlobalOrdinalsLoading() {
}
@Override
public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
throw new ElasticsearchIllegalStateException("Shouldn't be invoked");
}
@Override
public AtomicFieldData.WithOrdinals loadDirect(AtomicReaderContext context) {
throw new ElasticsearchIllegalStateException("Shouldn't be invoked");
}
@Override
public WithOrdinals loadGlobal(IndexReader indexReader) {
if (indexReader.leaves().size() <= 1) {
// ordinals are already global
ImmutableOpenMap.Builder<String, WithOrdinals> globalIfdPerType = ImmutableOpenMap.builder();
for (BytesRef parentType : parentTypes) {
PerType perType = new PerType(parentType.utf8ToString());
globalIfdPerType.put(perType.type, perType);
}
return new ParentChildGlobalOrdinalsIndexFieldData(globalIfdPerType.build(), 0);
}
try {
return cache.load(indexReader, this);
} catch (Throwable e) {
if (e instanceof ElasticsearchException) {
throw (ElasticsearchException) e;
} else {
throw new ElasticsearchException(e.getMessage(), e);
}
}
}
@Override
public WithOrdinals localGlobalDirect(IndexReader indexReader) throws Exception {
ImmutableOpenMap.Builder<String, WithOrdinals> globalIfdPerType = ImmutableOpenMap.builder();
long memorySizeInBytes = 0;
for (BytesRef parentType : parentTypes) {
PerType perType = new PerType(parentType.utf8ToString());
GlobalOrdinalsIndexFieldData globalIfd = (GlobalOrdinalsIndexFieldData) globalOrdinalsBuilder.build(indexReader, perType, indexSettings, breakerService);
globalIfdPerType.put(perType.type, globalIfd);
memorySizeInBytes += globalIfd.getMemorySizeInBytes();
}
return new ParentChildGlobalOrdinalsIndexFieldData(globalIfdPerType.build(), memorySizeInBytes);
}
@Override
public FieldMapper.Names getFieldNames() {
return ParentChildIndexFieldData.this.getFieldNames();
}
@Override
public FieldDataType getFieldDataType() {
return ParentChildIndexFieldData.this.getFieldDataType();
}
@Override
public boolean valuesOrdered() {
return ParentChildIndexFieldData.this.valuesOrdered();
}
@Override
public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode) {
throw new UnsupportedOperationException("Sort not supported on PerParentTypeGlobalOrdinals...");
}
@Override
public void clear() {
}
@Override
public void clear(IndexReader reader) {
}
@Override
public Index index() {
return ParentChildIndexFieldData.this.index();
}
private final class PerType extends ParentTypesGlobalOrdinalsLoading {
private final String type;
public PerType(String type) {
this.type = type;
}
@Override
public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
return loadDirect(context);
}
@Override
public AtomicFieldData.WithOrdinals loadDirect(AtomicReaderContext context) {
ParentChildAtomicFieldData parentChildAtomicFieldData = ParentChildIndexFieldData.this.load(context);
AtomicFieldData.WithOrdinals typeAfd = parentChildAtomicFieldData.getAtomicFieldData(type);
if (typeAfd != null) {
return typeAfd;
} else {
return PagedBytesAtomicFieldData.empty();
}
}
@Override
public WithOrdinals loadGlobal(IndexReader indexReader) {
return this;
}
@Override
public WithOrdinals localGlobalDirect(IndexReader indexReader) throws Exception {
return this;
}
}
}
// Effectively this is a cache key for use in the field data cache
private final class ParentChildGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData {
private final ImmutableOpenMap<String, WithOrdinals> typeGlobalOrdinals;
private ParentChildGlobalOrdinalsIndexFieldData(ImmutableOpenMap<String, WithOrdinals> typeGlobalOrdinals, long memorySizeInBytes) {
super(ParentChildIndexFieldData.this.index(), ParentChildIndexFieldData.this.indexSettings, ParentChildIndexFieldData.this.getFieldNames(), ParentChildIndexFieldData.this.getFieldDataType(), memorySizeInBytes);
this.typeGlobalOrdinals = typeGlobalOrdinals;
}
@Override
public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
throw new ElasticsearchIllegalStateException("Can't use directly");
}
public WithOrdinals type(String type) {
return typeGlobalOrdinals.get(type);
}
}
}


@ -22,28 +22,22 @@ package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
@ -99,58 +93,104 @@ public class ChildrenConstantScoreQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
final SearchContext searchContext = SearchContext.current();
final BytesRefHash parentIds = new BytesRefHash(512, searchContext.bigArrays());
boolean releaseParentIds = true;
try {
final ParentIdCollector collector = new ParentIdCollector(parentType, parentChildIndexFieldData, parentIds);
assert rewrittenChildQuery != null;
assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
final Query childQuery = rewrittenChildQuery;
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.search(childQuery, collector);
SearchContext sc = SearchContext.current();
ParentChildIndexFieldData.WithOrdinals globalIfd = parentChildIndexFieldData.getGlobalParentChild(
parentType, searcher.getIndexReader()
);
assert rewrittenChildQuery != null;
assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
long remaining = parentIds.size();
if (remaining == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
Filter shortCircuitFilter = null;
if (remaining == 1) {
BytesRef id = parentIds.get(0, new BytesRef());
shortCircuitFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
} else if (remaining <= shortCircuitParentDocSet) {
shortCircuitFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
final ParentWeight parentWeight = new ParentWeight(parentFilter, shortCircuitFilter, parentIds);
searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
releaseParentIds = false;
return parentWeight;
} finally {
if (releaseParentIds) {
Releasables.close(parentIds);
}
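// Global ordinals span the whole index, so reading maxOrd from the first leaf is enough
// to size the bit set of matched parent ordinals.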
final long maxOrd;
List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
if (globalIfd == null || leaves.isEmpty()) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
} else {
AtomicFieldData.WithOrdinals afd = globalIfd.load(leaves.get(0));
BytesValues.WithOrdinals globalValues = afd.getBytesValues(false);
Ordinals.Docs globalOrdinals = globalValues.ordinals();
maxOrd = globalOrdinals.getMaxOrd();
}
if (maxOrd == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
Query childQuery = rewrittenChildQuery;
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
ParentOrdCollector collector = new ParentOrdCollector(globalIfd, maxOrd);
indexSearcher.search(childQuery, collector);
final long remaining = collector.foundParents();
if (remaining == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
Filter shortCircuitFilter = null;
if (remaining <= shortCircuitParentDocSet) {
shortCircuitFilter = ParentIdsFilter.createShortCircuitFilter(
nonNestedDocsFilter, sc, parentType, collector.values, collector.parentOrds, remaining
);
}
return new ParentWeight(parentFilter, globalIfd, shortCircuitFilter, collector, remaining);
}
private final class ParentWeight extends Weight implements Releasable {
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
return false;
}
ChildrenConstantScoreQuery that = (ChildrenConstantScoreQuery) obj;
if (!originalChildQuery.equals(that.originalChildQuery)) {
return false;
}
if (!childType.equals(that.childType)) {
return false;
}
if (shortCircuitParentDocSet != that.shortCircuitParentDocSet) {
return false;
}
if (getBoost() != that.getBoost()) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = originalChildQuery.hashCode();
result = 31 * result + childType.hashCode();
result = 31 * result + shortCircuitParentDocSet;
result = 31 * result + Float.floatToIntBits(getBoost());
return result;
}
@Override
public String toString(String field) {
return "child_filter[" + childType + "/" + parentType + "](" + originalChildQuery + ')';
}
private final class ParentWeight extends Weight {
private final Filter parentFilter;
private final Filter shortCircuitFilter;
private final BytesRefHash parentIds;
private final ParentOrdCollector collector;
private final IndexFieldData.WithOrdinals globalIfd;
private long remaining;
private float queryNorm;
private float queryWeight;
public ParentWeight(Filter parentFilter, Filter shortCircuitFilter, BytesRefHash parentIds) {
public ParentWeight(Filter parentFilter, IndexFieldData.WithOrdinals globalIfd, Filter shortCircuitFilter, ParentOrdCollector collector, long remaining) {
this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
this.globalIfd = globalIfd;
this.shortCircuitFilter = shortCircuitFilter;
this.parentIds = parentIds;
this.remaining = parentIds.size();
this.collector = collector;
this.remaining = remaining;
}
@Override
@ -194,147 +234,99 @@ public class ChildrenConstantScoreQuery extends Query {
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
if (!DocIdSets.isEmpty(parentDocIdSet)) {
BytesValues bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
// We can't be sure that liveDocs have been applied, so we apply them here. The "remaining"
// count down (short circuit) logic will then work as expected.
parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());
if (bytesValues != null) {
DocIdSetIterator innerIterator = parentDocIdSet.iterator();
if (innerIterator != null) {
ParentDocIdIterator parentDocIdIterator = new ParentDocIdIterator(innerIterator, parentIds, bytesValues);
return ConstantScorer.create(parentDocIdIterator, this, queryWeight);
DocIdSetIterator innerIterator = parentDocIdSet.iterator();
if (innerIterator != null) {
LongBitSet parentOrds = collector.parentOrds;
BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues(false);
if (globalValues != null) {
Ordinals.Docs globalOrdinals = globalValues.ordinals();
DocIdSetIterator parentIdIterator = new ParentOrdIterator(innerIterator, parentOrds, globalOrdinals, this);
return ConstantScorer.create(parentIdIterator, this, queryWeight);
}
}
}
return null;
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIds);
}
private final class ParentDocIdIterator extends FilteredDocIdSetIterator {
private final BytesRefHash parentIds;
private final BytesValues values;
private ParentDocIdIterator(DocIdSetIterator innerIterator, BytesRefHash parentIds, BytesValues values) {
super(innerIterator);
this.parentIds = parentIds;
this.values = values;
}
@Override
protected boolean match(int doc) {
if (remaining == 0) {
try {
advance(DocIdSetIterator.NO_MORE_DOCS);
} catch (IOException e) {
throw new RuntimeException(e);
}
return false;
}
values.setDocument(doc);
BytesRef parentId = values.nextValue();
int hash = values.currentValueHash();
boolean match = parentIds.find(parentId, hash) >= 0;
if (match) {
remaining--;
}
return match;
}
}
}
private final static class ParentIdCollector extends NoopCollector {
private final static class ParentOrdCollector extends NoopCollector {
private final BytesRefHash parentIds;
private final String parentType;
private final ParentChildIndexFieldData indexFieldData;
private final LongBitSet parentOrds;
private final ParentChildIndexFieldData.WithOrdinals indexFieldData;
protected BytesValues.WithOrdinals values;
private Ordinals.Docs ordinals;
private BytesValues.WithOrdinals values;
private Ordinals.Docs globalOrdinals;
// This remembers what ordinals have already been seen in the current segment
// and prevents fetching the actual id from FD and checking if it exists in parentIds
private FixedBitSet seenOrdinals;
protected ParentIdCollector(String parentType, ParentChildIndexFieldData indexFieldData, BytesRefHash parentIds) {
this.parentType = parentType;
private ParentOrdCollector(ParentChildIndexFieldData.WithOrdinals indexFieldData, long maxOrd) {
// TODO: look into reusing LongBitSet#bits array
this.parentOrds = new LongBitSet(maxOrd + 1);
this.indexFieldData = indexFieldData;
this.parentIds = parentIds;
}
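// Each matching child document marks the global ordinal of its parent; setting a bit is
// idempotent, so duplicate parents add no extra cost.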
@Override
public void collect(int doc) throws IOException {
if (values != null) {
int ord = (int) ordinals.getOrd(doc);
if (!seenOrdinals.get(ord)) {
final BytesRef bytes = values.getValueByOrd(ord);
final int hash = values.currentValueHash();
parentIds.add(bytes, hash);
seenOrdinals.set(ord);
if (globalOrdinals != null) {
long globalOrdinal = globalOrdinals.getOrd(doc);
if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
parentOrds.set(globalOrdinal);
}
}
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getBytesValues(parentType);
values = indexFieldData.load(context).getBytesValues(false);
if (values != null) {
ordinals = values.ordinals();
final int maxOrd = (int) ordinals.getMaxOrd();
if (seenOrdinals == null || seenOrdinals.length() < maxOrd) {
seenOrdinals = new FixedBitSet(maxOrd);
} else {
seenOrdinals.clear(0, maxOrd);
globalOrdinals = values.ordinals();
} else {
globalOrdinals = null;
}
}
long foundParents() {
return parentOrds.cardinality();
}
}
private final static class ParentOrdIterator extends FilteredDocIdSetIterator {
private final LongBitSet parentOrds;
private final Ordinals.Docs ordinals;
private final ParentWeight parentWeight;
private ParentOrdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, Ordinals.Docs ordinals, ParentWeight parentWeight) {
super(innerIterator);
this.parentOrds = parentOrds;
this.ordinals = ordinals;
this.parentWeight = parentWeight;
}
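// A parent document matches if its global ordinal was marked by the collector; "remaining"
// lets the iterator stop early once every collected parent has been emitted.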
@Override
protected boolean match(int doc) {
if (parentWeight.remaining == 0) {
try {
advance(DocIdSetIterator.NO_MORE_DOCS);
} catch (IOException e) {
throw new RuntimeException(e);
}
return false;
}
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != this.getClass()) {
long parentOrd = ordinals.getOrd(doc);
if (parentOrd != Ordinals.MISSING_ORDINAL) {
boolean match = parentOrds.get(parentOrd);
if (match) {
parentWeight.remaining--;
}
return match;
}
return false;
}
ChildrenConstantScoreQuery that = (ChildrenConstantScoreQuery) obj;
if (!originalChildQuery.equals(that.originalChildQuery)) {
return false;
}
if (!childType.equals(that.childType)) {
return false;
}
if (shortCircuitParentDocSet != that.shortCircuitParentDocSet) {
return false;
}
if (getBoost() != that.getBoost()) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = originalChildQuery.hashCode();
result = 31 * result + childType.hashCode();
result = 31 * result + shortCircuitParentDocSet;
result = 31 * result + Float.floatToIntBits(getBoost());
return result;
}
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("child_filter[").append(childType).append("/").append(parentType).append("](").append(originalChildQuery).append(')');
return sb.toString();
}
}


@ -21,31 +21,28 @@ package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.*;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
/**
@ -59,7 +56,7 @@ import java.util.Set;
*/
public class ChildrenQuery extends Query {
private final ParentChildIndexFieldData parentChildIndexFieldData;
private final ParentChildIndexFieldData ifd;
private final String parentType;
private final String childType;
private final Filter parentFilter;
@ -71,8 +68,8 @@ public class ChildrenQuery extends Query {
private Query rewrittenChildQuery;
private IndexReader rewriteIndexReader;
public ChildrenQuery(ParentChildIndexFieldData parentChildIndexFieldData, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet, Filter nonNestedDocsFilter) {
this.parentChildIndexFieldData = parentChildIndexFieldData;
public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet, Filter nonNestedDocsFilter) {
this.ifd = ifd;
this.parentType = parentType;
this.childType = childType;
this.parentFilter = parentFilter;
@ -114,10 +111,8 @@ public class ChildrenQuery extends Query {
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("ChildrenQuery[").append(childType).append("/").append(parentType).append("](").append(originalChildQuery
.toString(field)).append(')').append(ToStringUtils.boost(getBoost()));
return sb.toString();
return "ChildrenQuery[" + childType + "/" + parentType + "](" + originalChildQuery
.toString(field) + ')' + ToStringUtils.boost(getBoost());
}
@Override
@ -147,100 +142,72 @@ public class ChildrenQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
SearchContext searchContext = SearchContext.current();
SearchContext sc = SearchContext.current();
assert rewrittenChildQuery != null;
assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
final Query childQuery = rewrittenChildQuery;
IndexFieldData.WithOrdinals globalIfd = ifd.getGlobalParentChild(parentType, searcher.getIndexReader());
if (globalIfd == null) {
// No docs of the specified type exist on this shard
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
final BytesRefHash parentIds;
final FloatArray scores;
final IntArray occurrences;
switch (scoreType) {
case MAX:
MaxCollector maxCollector = new MaxCollector(parentChildIndexFieldData, parentType, searchContext);
try {
indexSearcher.search(childQuery, maxCollector);
parentIds = maxCollector.parentIds;
scores = maxCollector.scores;
occurrences = null;
} finally {
Releasables.close(maxCollector.parentIdsIndex);
}
break;
case SUM:
SumCollector sumCollector = new SumCollector(parentChildIndexFieldData, parentType, searchContext);
try {
indexSearcher.search(childQuery, sumCollector);
parentIds = sumCollector.parentIds;
scores = sumCollector.scores;
occurrences = null;
} finally {
Releasables.close(sumCollector.parentIdsIndex);
}
break;
case AVG:
AvgCollector avgCollector = new AvgCollector(parentChildIndexFieldData, parentType, searchContext);
try {
indexSearcher.search(childQuery, avgCollector);
parentIds = avgCollector.parentIds;
scores = avgCollector.scores;
occurrences = avgCollector.occurrences;
} finally {
Releasables.close(avgCollector.parentIdsIndex);
}
break;
default:
throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
}
int size = (int) parentIds.size();
if (size == 0) {
Releasables.close(parentIds, scores, occurrences);
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
final Filter parentFilter;
if (size == 1) {
BytesRef id = parentIds.get(0, new BytesRef());
if (nonNestedDocsFilter != null) {
List<Filter> filters = Arrays.asList(
new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
nonNestedDocsFilter
);
parentFilter = new AndFilter(filters);
} else {
parentFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
boolean abort = true;
long numFoundParents;
ParentOrdAndScoreCollector collector = null;
try {
switch (scoreType) {
case MAX:
collector = new MaxCollector(globalIfd, sc);
break;
case SUM:
collector = new SumCollector(globalIfd, sc);
break;
case AVG:
collector = new AvgCollector(globalIfd, sc);
break;
default:
throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
}
} else if (size <= shortCircuitParentDocSet) {
parentFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
indexSearcher.search(childQuery, collector);
numFoundParents = collector.foundParents();
if (numFoundParents == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
abort = false;
} finally {
if (abort) {
Releasables.close(collector);
}
}
sc.addReleasable(collector, Lifetime.COLLECTION);
final Filter parentFilter;
if (numFoundParents <= shortCircuitParentDocSet) {
parentFilter = ParentIdsFilter.createShortCircuitFilter(
nonNestedDocsFilter, sc, parentType, collector.values, collector.parentIdxs, numFoundParents
);
} else {
parentFilter = new ApplyAcceptedDocsFilter(this.parentFilter);
}
ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, size, parentIds, scores, occurrences);
searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
return parentWeight;
return new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, numFoundParents, collector);
}
private final class ParentWeight extends Weight implements Releasable {
private final class ParentWeight extends Weight {
private final Weight childWeight;
private final Filter parentFilter;
private final BytesRefHash parentIds;
private final FloatArray scores;
private final IntArray occurrences;
private final ParentOrdAndScoreCollector collector;
private int remaining;
private long remaining;
private ParentWeight(Weight childWeight, Filter parentFilter, int remaining, BytesRefHash parentIds, FloatArray scores, IntArray occurrences) {
private ParentWeight(Weight childWeight, Filter parentFilter, long remaining, ParentOrdAndScoreCollector collector) {
this.childWeight = childWeight;
this.parentFilter = parentFilter;
this.remaining = remaining;
this.parentIds = parentIds;
this.scores = scores;
this.occurrences = occurrences;
this.collector = collector;
}
@Override
@ -271,216 +238,57 @@ public class ChildrenQuery extends Query {
return null;
}
BytesValues bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
// We can't be sure that liveDocs have been applied, so we apply them here. The "remaining"
// count down (short circuit) logic will then work as expected.
DocIdSetIterator parents = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();
BytesValues.WithOrdinals bytesValues = collector.globalIfd.load(context).getBytesValues(false);
if (bytesValues == null) {
return null;
}
// We can't be sure that liveDocs have been applied, so we apply them here. The "remaining"
// count down (short circuit) logic will then work as expected.
DocIdSetIterator parentsIterator = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();
switch (scoreType) {
case AVG:
return new AvgParentScorer(this, bytesValues, parentIds, scores, occurrences, parentsIterator);
return new AvgParentScorer(this, parents, collector, bytesValues.ordinals());
default:
return new ParentScorer(this, bytesValues, parentIds, scores, parentsIterator);
}
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIds, scores, occurrences);
}
private class ParentScorer extends Scorer {
final BytesRefHash parentIds;
final FloatArray scores;
final BytesValues bytesValues;
final DocIdSetIterator parentsIterator;
int currentDocId = -1;
float currentScore;
ParentScorer(Weight weight, BytesValues bytesValues, BytesRefHash parentIds, FloatArray scores, DocIdSetIterator parentsIterator) {
super(weight);
this.bytesValues = bytesValues;
this.parentsIterator = parentsIterator;
this.parentIds = parentIds;
this.scores = scores;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public int freq() throws IOException {
// We don't have the original child query hit info here...
// But the freq of the children could be collected and returned here, but that would make this Scorer more expensive.
return 1;
}
@Override
public int docID() {
return currentDocId;
}
@Override
public int nextDoc() throws IOException {
if (remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
bytesValues.setDocument(currentDocId);
long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
if (index != -1) {
currentScore = scores.get(index);
remaining--;
return currentDocId;
}
}
}
@Override
public int advance(int target) throws IOException {
if (remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
currentDocId = parentsIterator.advance(target);
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
bytesValues.setDocument(currentDocId);
long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
if (index != -1) {
currentScore = scores.get(index);
remaining--;
return currentDocId;
} else {
return nextDoc();
}
}
@Override
public long cost() {
return parentsIterator.cost();
}
}
private final class AvgParentScorer extends ParentScorer {
final IntArray occurrences;
AvgParentScorer(Weight weight, BytesValues values, BytesRefHash parentIds, FloatArray scores, IntArray occurrences, DocIdSetIterator parentsIterator) {
super(weight, values, parentIds, scores, parentsIterator);
this.occurrences = occurrences;
}
@Override
public int nextDoc() throws IOException {
if (remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
bytesValues.setDocument(currentDocId);
long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
if (index != -1) {
currentScore = scores.get(index);
currentScore /= occurrences.get(index);
remaining--;
return currentDocId;
}
}
}
@Override
public int advance(int target) throws IOException {
if (remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
currentDocId = parentsIterator.advance(target);
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
bytesValues.setDocument(currentDocId);
long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
if (index != -1) {
currentScore = scores.get(index);
currentScore /= occurrences.get(index);
remaining--;
return currentDocId;
} else {
return nextDoc();
}
return new ParentScorer(this, parents, collector, bytesValues.ordinals());
}
}
}
private abstract static class ParentIdAndScoreCollector extends NoopCollector {
private abstract static class ParentOrdAndScoreCollector extends NoopCollector implements Releasable {
final BytesRefHash parentIds;
protected final String parentType;
private final ParentChildIndexFieldData indexFieldData;
private final IndexFieldData.WithOrdinals globalIfd;
protected final LongHash parentIdxs;
protected final BigArrays bigArrays;
protected FloatArray scores;
protected final SearchContext searchContext;
protected Ordinals.Docs globalOrdinals;
protected BytesValues.WithOrdinals values;
protected Ordinals.Docs ordinals;
protected Scorer scorer;
// This remembers what ordinals have already been seen in the current segment
// and prevents fetching the actual id from FD and checking if it exists in parentIds
protected LongArray parentIdsIndex;
private ParentIdAndScoreCollector(ParentChildIndexFieldData indexFieldData, String parentType, SearchContext searchContext) {
this.parentType = parentType;
this.indexFieldData = indexFieldData;
private ParentOrdAndScoreCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
this.globalIfd = globalIfd;
this.bigArrays = searchContext.bigArrays();
this.parentIds = new BytesRefHash(512, bigArrays);
this.parentIdxs = new LongHash(512, bigArrays);
this.scores = bigArrays.newFloatArray(512, false);
this.searchContext = searchContext;
}
@Override
public void collect(int doc) throws IOException {
if (values != null) {
long ord = ordinals.getOrd(doc);
long parentIdx = parentIdsIndex.get(ord);
if (parentIdx < 0) {
final BytesRef bytes = values.getValueByOrd(ord);
final int hash = values.currentValueHash();
parentIdx = parentIds.add(bytes, hash);
if (parentIdx < 0) {
parentIdx = -parentIdx - 1;
doScore(parentIdx);
} else {
if (globalOrdinals != null) {
final long globalOrdinal = globalOrdinals.getOrd(doc);
if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
long parentIdx = parentIdxs.add(globalOrdinal);
if (parentIdx >= 0) {
scores = bigArrays.grow(scores, parentIdx + 1);
scores.set(parentIdx, scorer.score());
} else {
parentIdx = -1 - parentIdx;
doScore(parentIdx);
}
parentIdsIndex.set(ord, parentIdx);
} else {
doScore(parentIdx);
}
}
}
@ -490,31 +298,32 @@ public class ChildrenQuery extends Query {
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getBytesValues(parentType);
values = globalIfd.load(context).getBytesValues(false);
if (values != null) {
ordinals = values.ordinals();
final long maxOrd = ordinals.getMaxOrd();
if (parentIdsIndex == null) {
parentIdsIndex = bigArrays.newLongArray(BigArrays.overSize(maxOrd), false);
} else if (parentIdsIndex.size() < maxOrd) {
parentIdsIndex = bigArrays.grow(parentIdsIndex, maxOrd);
}
parentIdsIndex.fill(0, maxOrd, -1L);
globalOrdinals = values.ordinals();
}
}
public long foundParents() {
return parentIdxs.size();
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIdxs, scores);
}
}
private final static class SumCollector extends ParentIdAndScoreCollector {
private final static class SumCollector extends ParentOrdAndScoreCollector {
private SumCollector(ParentChildIndexFieldData indexFieldData, String parentType, SearchContext searchContext) {
super(indexFieldData, parentType, searchContext);
private SumCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
super(globalIfd, searchContext);
}
@Override
@ -523,10 +332,10 @@ public class ChildrenQuery extends Query {
}
}
private final static class MaxCollector extends ParentIdAndScoreCollector {
private final static class MaxCollector extends ParentOrdAndScoreCollector {
private MaxCollector(ParentChildIndexFieldData indexFieldData, String childType, SearchContext searchContext) {
super(indexFieldData, childType, searchContext);
private MaxCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
super(globalIfd, searchContext);
}
@Override
@ -538,42 +347,199 @@ public class ChildrenQuery extends Query {
}
}
private final static class AvgCollector extends ParentIdAndScoreCollector {
private final static class AvgCollector extends ParentOrdAndScoreCollector {
private IntArray occurrences;
AvgCollector(ParentChildIndexFieldData indexFieldData, String childType, SearchContext searchContext) {
super(indexFieldData, childType, searchContext);
AvgCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
super(globalIfd, searchContext);
this.occurrences = bigArrays.newIntArray(512, false);
}
@Override
public void collect(int doc) throws IOException {
if (values != null) {
int ord = (int) ordinals.getOrd(doc);
long parentIdx = parentIdsIndex.get(ord);
if (parentIdx < 0) {
final BytesRef bytes = values.getValueByOrd(ord);
final int hash = values.currentValueHash();
parentIdx = parentIds.add(bytes, hash);
if (parentIdx < 0) {
parentIdx = -parentIdx - 1;
if (globalOrdinals != null) {
final long globalOrdinal = globalOrdinals.getOrd(doc);
if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
long parentIdx = parentIdxs.add(globalOrdinal);
if (parentIdx >= 0) {
scores = bigArrays.grow(scores, parentIdx + 1);
occurrences = bigArrays.grow(occurrences, parentIdx + 1);
scores.set(parentIdx, scorer.score());
occurrences.set(parentIdx, 1);
} else {
parentIdx = -1 - parentIdx;
scores.increment(parentIdx, scorer.score());
occurrences.increment(parentIdx, 1);
} else {
scores = bigArrays.grow(scores, parentIdx + 1);
scores.set(parentIdx, scorer.score());
occurrences = bigArrays.grow(occurrences, parentIdx + 1);
occurrences.set(parentIdx, 1);
}
parentIdsIndex.set(ord, parentIdx);
} else {
scores.increment(parentIdx, scorer.score());
occurrences.increment(parentIdx, 1);
}
}
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIdxs, scores, occurrences);
}
}
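// Walks the parent docs that pass the parent filter, resolves each doc's global ordinal against
// the collected LongHash to fetch its score, and skips parents whose ordinal was never collected.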
private static class ParentScorer extends Scorer {
final ParentWeight parentWeight;
final LongHash parentIds;
final FloatArray scores;
final Ordinals.Docs globalOrdinals;
final DocIdSetIterator parentsIterator;
int currentDocId = -1;
float currentScore;
ParentScorer(ParentWeight parentWeight, DocIdSetIterator parentsIterator, ParentOrdAndScoreCollector collector, Ordinals.Docs globalOrdinals) {
super(parentWeight);
this.parentWeight = parentWeight;
this.globalOrdinals = globalOrdinals;
this.parentsIterator = parentsIterator;
this.parentIds = collector.parentIdxs;
this.scores = collector.scores;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public int freq() throws IOException {
// We don't have the original child query hit info here...
// But the freq of the children could be collected and returned here, but that would make this Scorer more expensive.
return 1;
}
@Override
public int docID() {
return currentDocId;
}
@Override
public int nextDoc() throws IOException {
if (parentWeight.remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
continue;
}
final long parentIdx = parentIds.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
parentWeight.remaining--;
return currentDocId;
}
}
}
@Override
public int advance(int target) throws IOException {
if (parentWeight.remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
currentDocId = parentsIterator.advance(target);
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
return nextDoc();
}
final long parentIdx = parentIds.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
parentWeight.remaining--;
return currentDocId;
} else {
return nextDoc();
}
}
@Override
public long cost() {
return parentsIterator.cost();
}
}
private static final class AvgParentScorer extends ParentScorer {
private final IntArray occurrences;
AvgParentScorer(ParentWeight weight, DocIdSetIterator parentsIterator, ParentOrdAndScoreCollector collector, Ordinals.Docs globalOrdinals) {
super(weight, parentsIterator, collector, globalOrdinals);
this.occurrences = ((AvgCollector) collector).occurrences;
}
@Override
public int nextDoc() throws IOException {
if (parentWeight.remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
while (true) {
currentDocId = parentsIterator.nextDoc();
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
continue;
}
final long parentIdx = parentIds.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
currentScore /= occurrences.get(parentIdx);
parentWeight.remaining--;
return currentDocId;
}
}
}
@Override
public int advance(int target) throws IOException {
if (parentWeight.remaining == 0) {
return currentDocId = NO_MORE_DOCS;
}
currentDocId = parentsIterator.advance(target);
if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
return currentDocId;
}
final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
return nextDoc();
}
final long parentIdx = parentIds.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
currentScore /= occurrences.get(parentIdx);
parentWeight.remaining--;
return currentDocId;
} else {
return nextDoc();
}
}
}
}


@ -23,22 +23,19 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
@ -88,174 +85,36 @@ public class ParentConstantScoreQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
final SearchContext searchContext = SearchContext.current();
final BytesRefHash parentIds = new BytesRefHash(512, searchContext.bigArrays());
boolean releaseParentIds = true;
try {
ParentIdsCollector collector = new ParentIdsCollector(parentType, parentChildIndexFieldData, parentIds);
assert rewrittenParentQuery != null;
assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
final Query parentQuery = rewrittenParentQuery;
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.search(parentQuery, collector);
IndexFieldData.WithOrdinals globalIfd = parentChildIndexFieldData.getGlobalParentChild(parentType, searcher.getIndexReader());
assert rewrittenParentQuery != null;
assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
if (parentIds.size() == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
final ChildrenWeight childrenWeight = new ChildrenWeight(childrenFilter, parentIds);
searchContext.addReleasable(childrenWeight, Lifetime.COLLECTION);
releaseParentIds = false;
return childrenWeight;
} finally {
if (releaseParentIds) {
Releasables.close(parentIds);
}
}
}
private final class ChildrenWeight extends Weight implements Releasable {
private final Filter childrenFilter;
private final BytesRefHash parentIds;
private float queryNorm;
private float queryWeight;
private FixedBitSet seenOrdinalsCache;
private FixedBitSet seenMatchedOrdinalsCache;
private ChildrenWeight(Filter childrenFilter, BytesRefHash parentIds) {
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
this.parentIds = parentIds;
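// Global ordinals form a single ordinal space across all segments, so the max ordinal read from the
// first leaf's global values gives the total number of distinct parent ids in the whole reader.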
final long maxOrd;
List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
if (globalIfd == null || leaves.isEmpty()) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
} else {
AtomicFieldData.WithOrdinals afd = globalIfd.load(leaves.get(0));
BytesValues.WithOrdinals globalValues = afd.getBytesValues(false);
Ordinals.Docs globalOrdinals = globalValues.ordinals();
maxOrd = globalOrdinals.getMaxOrd();
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
if (maxOrd == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
@Override
public Query getQuery() {
return ParentConstantScoreQuery.this;
final Query parentQuery = rewrittenParentQuery;
ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd);
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
@Override
public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(childrenDocIdSet)) {
return null;
}
BytesValues.WithOrdinals bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
if (bytesValues != null) {
DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
if (innerIterator != null) {
Ordinals.Docs ordinals = bytesValues.ordinals();
int maxOrd = (int) ordinals.getMaxOrd();
if (seenOrdinalsCache == null || seenOrdinalsCache.length() < maxOrd) {
seenOrdinalsCache = new FixedBitSet(maxOrd);
seenMatchedOrdinalsCache = new FixedBitSet(maxOrd);
} else {
seenOrdinalsCache.clear(0, maxOrd);
seenMatchedOrdinalsCache.clear(0, maxOrd);
}
ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
innerIterator, parentIds, bytesValues, ordinals, seenOrdinalsCache, seenMatchedOrdinalsCache
);
return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
}
}
return null;
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIds);
}
private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
private final BytesRefHash parentIds;
private final BytesValues.WithOrdinals bytesValues;
private final Ordinals.Docs ordinals;
// This remembers which ordinals have already been emitted in the current segment
// and avoids fetching the actual id from fielddata and checking whether it exists in parentIds again
private final FixedBitSet seenOrdinals;
private final FixedBitSet seenMatchedOrdinals;
ChildrenDocIdIterator(DocIdSetIterator innerIterator, BytesRefHash parentIds, BytesValues.WithOrdinals bytesValues, Ordinals.Docs ordinals, FixedBitSet seenOrdinals, FixedBitSet seenMatchedOrdinals) {
super(innerIterator);
this.parentIds = parentIds;
this.bytesValues = bytesValues;
this.ordinals = ordinals;
this.seenOrdinals = seenOrdinals;
this.seenMatchedOrdinals = seenMatchedOrdinals;
}
@Override
protected boolean match(int doc) {
int ord = (int) ordinals.getOrd(doc);
if (ord == Ordinals.MISSING_ORDINAL) {
return false;
}
if (!seenOrdinals.get(ord)) {
seenOrdinals.set(ord);
if (parentIds.find(bytesValues.getValueByOrd(ord), bytesValues.currentValueHash()) >= 0) {
seenMatchedOrdinals.set(ord);
return true;
} else {
return false;
}
} else {
return seenMatchedOrdinals.get(ord);
}
}
}
}
private final static class ParentIdsCollector extends NoopCollector {
private final BytesRefHash parentIds;
private final ParentChildIndexFieldData indexFieldData;
private final String parentType;
private BytesValues values;
ParentIdsCollector(String parentType, ParentChildIndexFieldData indexFieldData, BytesRefHash parentIds) {
this.parentIds = parentIds;
this.indexFieldData = indexFieldData;
this.parentType = parentType;
}
public void collect(int doc) throws IOException {
// It can happen that for a particular segment no documents exist for a specific type. This prevents an NPE.
if (values != null) {
values.setDocument(doc);
parentIds.add(values.nextValue(), values.currentValueHash());
}
}
@Override
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
values = indexFieldData.load(readerContext).getBytesValues(parentType);
}
return new ChildrenWeight(childrenFilter, collector, globalIfd);
}
@Override
@ -290,9 +149,125 @@ public class ParentConstantScoreQuery extends Query {
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("parent_filter[").append(parentType).append("](").append(originalParentQuery).append(')');
return sb.toString();
return "parent_filter[" + parentType + "](" + originalParentQuery + ')';
}
private final class ChildrenWeight extends Weight {
private final IndexFieldData.WithOrdinals globalIfd;
private final Filter childrenFilter;
private final LongBitSet parentOrds;
private float queryNorm;
private float queryWeight;
private ChildrenWeight(Filter childrenFilter, ParentOrdsCollector collector, IndexFieldData.WithOrdinals globalIfd) {
this.globalIfd = globalIfd;
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
this.parentOrds = collector.parentOrds;
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
@Override
public Query getQuery() {
return ParentConstantScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
return queryWeight * queryWeight;
}
@Override
public void normalize(float norm, float topLevelBoost) {
this.queryNorm = norm * topLevelBoost;
queryWeight *= this.queryNorm;
}
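// The scorer is a constant-score wrapper around the children filter; the only per-segment work left
// is resolving each child's global parent ordinal and testing it against the collected parent ords.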
@Override
public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
if (DocIdSets.isEmpty(childrenDocIdSet)) {
return null;
}
BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues(false);
if (globalValues != null) {
DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
if (innerIterator != null) {
Ordinals.Docs globalOrdinals = globalValues.ordinals();
ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
innerIterator, parentOrds, globalOrdinals
);
return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
}
}
return null;
}
}
private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
private final LongBitSet parentOrds;
private final Ordinals.Docs globalOrdinals;
ChildrenDocIdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, Ordinals.Docs globalOrdinals) {
super(innerIterator);
this.parentOrds = parentOrds;
this.globalOrdinals = globalOrdinals;
}
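// A child matches when its parent's global ordinal was marked by the parent-query collector;
// the LongBitSet lookup replaces the old per-segment BytesRefHash id lookup.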
@Override
protected boolean match(int docId) {
int globalOrd = (int) globalOrdinals.getOrd(docId);
if (globalOrd != Ordinals.MISSING_ORDINAL) {
return parentOrds.get(globalOrd);
} else {
return false;
}
}
}
private final static class ParentOrdsCollector extends NoopCollector {
private final LongBitSet parentOrds;
private final IndexFieldData.WithOrdinals globalIfd;
private Ordinals.Docs globalOrdinals;
ParentOrdsCollector(IndexFieldData.WithOrdinals globalIfd, long maxOrd) {
this.parentOrds = new LongBitSet(maxOrd);
this.globalIfd = globalIfd;
}
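// Marking the parent's global ordinal in the bitset both records and de-duplicates matches;
// parentCount() reports the cardinality afterwards.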
public void collect(int doc) throws IOException {
// It can happen that for a particular segment no documents exist for a specific type. This prevents an NPE.
if (globalOrdinals != null) {
long globalOrd = globalOrdinals.getOrd(doc);
if (globalOrd != Ordinals.MISSING_ORDINAL) {
parentOrds.set(globalOrd);
}
}
}
@Override
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
BytesValues.WithOrdinals values = globalIfd.load(readerContext).getBytesValues(false);
if (values != null) {
globalOrdinals = values.ordinals();
}
}
public long parentCount() {
return parentOrds.cardinality();
}
}
}

View File

@ -18,36 +18,106 @@
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.*;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.*;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
/**
* Advantages of using this filter over Lucene's TermsFilter in the parent/child context:
* 1) There is no need to copy all values from the id cache into a list and then
* copy all the id values into one contiguous byte array, which saves a lot of object creation and GC.
* 2) We filter docs by one field only.
* 3) We can directly reference values that originate from the id cache.
*/
final class ParentIdsFilter extends Filter {
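// With exactly one matching parent the filter short-circuits to a plain _uid TermFilter (optionally
// ANDed with the non-nested docs filter); otherwise the matching ids are copied into a BytesRefHash
// and matching is handled by this filter.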
static Filter createShortCircuitFilter(Filter nonNestedDocsFilter, SearchContext searchContext,
String parentType, BytesValues.WithOrdinals globalValues,
LongBitSet parentOrds, long numFoundParents) {
if (numFoundParents == 1) {
globalValues.getValueByOrd(parentOrds.nextSetBit(0));
BytesRef id = globalValues.copyShared();
if (nonNestedDocsFilter != null) {
List<Filter> filters = Arrays.asList(
new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
nonNestedDocsFilter
);
return new AndFilter(filters);
} else {
return new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
}
} else {
BytesRefHash parentIds = null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (long parentOrd = parentOrds.nextSetBit(0L); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
parentIds.add(globalValues.getValueByOrd(parentOrd));
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
}
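// Same short-circuit logic, but for callers that collected the matching parents into a LongHash
// (global ordinal to dense index) rather than a LongBitSet.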
static Filter createShortCircuitFilter(Filter nonNestedDocsFilter, SearchContext searchContext,
String parentType, BytesValues.WithOrdinals globalValues,
LongHash parentIdxs, long numFoundParents) {
if (numFoundParents == 1) {
globalValues.getValueByOrd(parentIdxs.get(0));
BytesRef id = globalValues.copyShared();
if (nonNestedDocsFilter != null) {
List<Filter> filters = Arrays.asList(
new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
nonNestedDocsFilter
);
return new AndFilter(filters);
} else {
return new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
}
} else {
BytesRefHash parentIds = null;
boolean constructed = false;
try {
parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
for (int id = 0; id < parentIdxs.size(); id++) {
parentIds.add(globalValues.getValueByOrd(parentIdxs.get(id)));
}
constructed = true;
} finally {
if (!constructed) {
Releasables.close(parentIds);
}
}
searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
}
}
private final BytesRef parentTypeBr;
private final Filter nonNestedDocsFilter;
private final BytesRefHash parentIds;
ParentIdsFilter(String parentType, Filter nonNestedDocsFilter, BytesRefHash parentIds) {
private ParentIdsFilter(String parentType, Filter nonNestedDocsFilter, BytesRefHash parentIds) {
this.nonNestedDocsFilter = nonNestedDocsFilter;
this.parentTypeBr = new BytesRef(parentType);
this.parentIds = parentIds;

View File

@ -23,7 +23,6 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
@ -33,10 +32,10 @@ import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.BytesValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.search.internal.SearchContext;
@ -99,11 +98,7 @@ public class ParentQuery extends Query {
@Override
public String toString(String field) {
StringBuilder sb = new StringBuilder();
sb.append("ParentQuery[").append(parentType).append("](")
.append(originalParentQuery.toString(field)).append(')')
.append(ToStringUtils.boost(getBoost()));
return sb.toString();
return "ParentQuery[" + parentType + "](" + originalParentQuery.toString(field) + ')' + ToStringUtils.boost(getBoost());
}
@Override
@ -133,62 +128,70 @@ public class ParentQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
SearchContext searchContext = SearchContext.current();
final ParentIdAndScoreCollector collector = new ParentIdAndScoreCollector(searchContext, parentChildIndexFieldData, parentType);
SearchContext sc = SearchContext.current();
ChildWeight childWeight;
boolean releaseCollectorResource = true;
ParentOrdAndScoreCollector collector = null;
IndexFieldData.WithOrdinals globalIfd = parentChildIndexFieldData.getGlobalParentChild(parentType, searcher.getIndexReader());
if (globalIfd == null) {
// No docs of the specified type exist on this shard
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
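// The collector's parent ords and scores are backed by big arrays; the finally block releases them
// if weight creation fails before they are handed off to the search context.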
try {
assert rewrittenParentQuery != null;
assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
final Query parentQuery = rewrittenParentQuery;
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
collector = new ParentOrdAndScoreCollector(sc, globalIfd);
IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
indexSearcher.search(parentQuery, collector);
FloatArray scores = collector.scores;
BytesRefHash parentIds = collector.parentIds;
if (parentIds.size() == 0) {
if (collector.parentCount() == 0) {
return Queries.newMatchNoDocsQuery().createWeight(searcher);
}
childWeight = new ChildWeight(searchContext, parentQuery.createWeight(searcher), childrenFilter, parentIds, scores);
childWeight = new ChildWeight(parentQuery.createWeight(searcher), childrenFilter, collector, globalIfd);
releaseCollectorResource = false;
} finally {
if (releaseCollectorResource) {
// either if we run into an exception or if we return early
Releasables.close(collector.parentIds, collector.scores);
Releasables.close(collector);
}
}
searchContext.addReleasable(childWeight, Lifetime.COLLECTION);
sc.addReleasable(collector, Lifetime.COLLECTION);
return childWeight;
}
private static class ParentIdAndScoreCollector extends NoopCollector {
private static class ParentOrdAndScoreCollector extends NoopCollector implements Releasable {
private final BytesRefHash parentIds;
private final LongHash parentIdxs;
private FloatArray scores;
private final ParentChildIndexFieldData indexFieldData;
private final String parentType;
private final IndexFieldData.WithOrdinals globalIfd;
private final BigArrays bigArrays;
private Scorer scorer;
private BytesValues values;
private BytesValues.WithOrdinals values;
private Ordinals.Docs globalOrdinals;
ParentIdAndScoreCollector(SearchContext searchContext, ParentChildIndexFieldData indexFieldData, String parentType) {
ParentOrdAndScoreCollector(SearchContext searchContext, IndexFieldData.WithOrdinals globalIfd) {
this.bigArrays = searchContext.bigArrays();
this.parentIds = new BytesRefHash(512, bigArrays);
this.parentIdxs = new LongHash(512, bigArrays);
this.scores = bigArrays.newFloatArray(512, false);
this.indexFieldData = indexFieldData;
this.parentType = parentType;
this.globalIfd = globalIfd;
}
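// LongHash assigns each newly seen global ordinal a dense index, which doubles as that parent's
// slot in the scores array.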
@Override
public void collect(int doc) throws IOException {
// It can happen that for a particular segment no documents exist for a specific type. This prevents an NPE.
if (values != null) {
values.setDocument(doc);
long index = parentIds.add(values.nextValue(), values.currentValueHash());
if (index >= 0) {
scores = bigArrays.grow(scores, index + 1);
scores.set(index, scorer.score());
if (globalOrdinals != null) {
long globalOrdinal = globalOrdinals.getOrd(doc);
if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
long parentIdx = parentIdxs.add(globalOrdinal);
if (parentIdx >= 0) {
scores = bigArrays.grow(scores, parentIdx + 1);
scores.set(parentIdx, scorer.score());
} else {
assert false : "parent id should only match once, since there can only be one parent doc";
}
}
}
}
@ -200,27 +203,37 @@ public class ParentQuery extends Query {
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
values = indexFieldData.load(context).getBytesValues(parentType);
values = globalIfd.load(context).getBytesValues(false);
if (values != null) {
globalOrdinals = values.ordinals();
}
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIdxs, scores);
}
public long parentCount() {
return parentIdxs.size();
}
}
private class ChildWeight extends Weight implements Releasable {
private class ChildWeight extends Weight {
private final SearchContext searchContext;
private final Weight parentWeight;
private final Filter childrenFilter;
private final BytesRefHash parentIds;
private final LongHash parentIdxs;
private final FloatArray scores;
private final IndexFieldData.WithOrdinals globalIfd;
private FixedBitSet seenOrdinalsCache;
private LongArray parentIdsIndexCache;
private ChildWeight(SearchContext searchContext, Weight parentWeight, Filter childrenFilter, BytesRefHash parentIds, FloatArray scores) {
this.searchContext = searchContext;
private ChildWeight(Weight parentWeight, Filter childrenFilter, ParentOrdAndScoreCollector collector, IndexFieldData.WithOrdinals globalIfd) {
this.parentWeight = parentWeight;
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
this.parentIds = parentIds;
this.scores = scores;
this.parentIdxs = collector.parentIdxs;
this.scores = collector.scores;
this.globalIfd = globalIfd;
}
@Override
@ -250,60 +263,33 @@ public class ParentQuery extends Query {
if (DocIdSets.isEmpty(childrenDocSet)) {
return null;
}
BytesValues.WithOrdinals bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
BytesValues.WithOrdinals bytesValues = globalIfd.load(context).getBytesValues(false);
if (bytesValues == null) {
return null;
}
Ordinals.Docs ordinals = bytesValues.ordinals();
final int maxOrd = (int) ordinals.getMaxOrd();
final BigArrays bigArrays = searchContext.bigArrays();
if (parentIdsIndexCache == null) {
parentIdsIndexCache = bigArrays.newLongArray(BigArrays.overSize(maxOrd), false);
} else if (parentIdsIndexCache.size() < maxOrd) {
parentIdsIndexCache = bigArrays.grow(parentIdsIndexCache, maxOrd);
}
parentIdsIndexCache.fill(0, maxOrd, -1L);
if (seenOrdinalsCache == null || seenOrdinalsCache.length() < maxOrd) {
seenOrdinalsCache = new FixedBitSet(maxOrd);
} else {
seenOrdinalsCache.clear(0, maxOrd);
}
return new ChildScorer(this, parentIds, scores, childrenDocSet.iterator(), bytesValues, ordinals, seenOrdinalsCache, parentIdsIndexCache);
return new ChildScorer(this, parentIdxs, scores, childrenDocSet.iterator(), ordinals);
}
@Override
public void close() throws ElasticsearchException {
Releasables.close(parentIds, scores, parentIdsIndexCache);
}
}
private static class ChildScorer extends Scorer {
private final BytesRefHash parentIds;
private final LongHash parentIdxs;
private final FloatArray scores;
private final DocIdSetIterator childrenIterator;
private final BytesValues.WithOrdinals bytesValues;
private final Ordinals.Docs ordinals;
// This remembers which ordinals have already been seen in the current segment
// and avoids fetching the actual id from fielddata and checking whether it exists in parentIds again
private final FixedBitSet seenOrdinals;
private final LongArray parentIdsIndex;
private int currentChildDoc = -1;
private float currentScore;
ChildScorer(Weight weight, BytesRefHash parentIds, FloatArray scores, DocIdSetIterator childrenIterator,
BytesValues.WithOrdinals bytesValues, Ordinals.Docs ordinals, FixedBitSet seenOrdinals, LongArray parentIdsIndex) {
ChildScorer(Weight weight, LongHash parentIdxs, FloatArray scores, DocIdSetIterator childrenIterator, Ordinals.Docs ordinals) {
super(weight);
this.parentIds = parentIds;
this.parentIdxs = parentIdxs;
this.scores = scores;
this.childrenIterator = childrenIterator;
this.bytesValues = bytesValues;
this.ordinals = ordinals;
this.seenOrdinals = seenOrdinals;
this.parentIdsIndex = parentIdsIndex;
}
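// With global ordinals the parent lookup is a direct parentIdxs.find(globalOrdinal), so the
// seenOrdinals / parentIdsIndex per-segment caches of the old implementation are no longer needed.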
@Override
@ -331,25 +317,15 @@ public class ParentQuery extends Query {
return currentChildDoc;
}
int ord = (int) ordinals.getOrd(currentChildDoc);
if (ord == Ordinals.MISSING_ORDINAL) {
int globalOrdinal = (int) ordinals.getOrd(currentChildDoc);
if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
continue;
}
if (!seenOrdinals.get(ord)) {
seenOrdinals.set(ord);
long parentIdx = parentIds.find(bytesValues.getValueByOrd(ord), bytesValues.currentValueHash());
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
parentIdsIndex.set(ord, parentIdx);
return currentChildDoc;
}
} else {
long parentIdx = parentIdsIndex.get(ord);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
return currentChildDoc;
}
final long parentIdx = parentIdxs.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
return currentChildDoc;
}
}
}
@ -361,29 +337,17 @@ public class ParentQuery extends Query {
return currentChildDoc;
}
int ord = (int) ordinals.getOrd(currentChildDoc);
if (ord == Ordinals.MISSING_ORDINAL) {
int globalOrdinal = (int) ordinals.getOrd(currentChildDoc);
if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
return nextDoc();
}
if (!seenOrdinals.get(ord)) {
seenOrdinals.set(ord);
long parentIdx = parentIds.find(bytesValues.getValueByOrd(ord), bytesValues.currentValueHash());
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
parentIdsIndex.set(ord, parentIdx);
return currentChildDoc;
} else {
return nextDoc();
}
final long parentIdx = parentIdxs.find(globalOrdinal);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
return currentChildDoc;
} else {
long parentIdx = parentIdsIndex.get(ord);
if (parentIdx != -1) {
currentScore = scores.get(parentIdx);
return currentChildDoc;
} else {
return nextDoc();
}
return nextDoc();
}
}

View File

@ -120,6 +120,9 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
IndexSearcher searcher = new IndexSearcher(indexReader);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(
SearchContext.current(), new Engine.SimpleSearcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
));
TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));

View File

@ -1388,7 +1388,7 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
client().prepareIndex("grandissue", "child_type_two", "4").setParent("2").setRouting("1")
.setSource("name", "Kate")
.get();
client().admin().indices().prepareRefresh("grandissue").get();
refresh();
SearchResponse searchResponse = client().prepareSearch("grandissue").setQuery(
boolQuery().must(