Cut p/c queries (the has_child and has_parent queries) over to use global ordinals instead of being based on bytes values.
Closes #5846
This commit is contained in:
parent fc3efda6af
commit 0f23485a3c
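Reviewer orientation (editor's note, not part of the commit): with global ordinals, every unique parent id maps to a single long ordinal per top-level reader, so the child side can record matches in a bit set and the parent side can test membership without hashing or comparing the underlying bytes. A minimal, self-contained Java sketch of that idea, using plain JDK types and invented names in place of Lucene's LongBitSet and packed ordinal maps:

    import java.util.BitSet;

    public class GlobalOrdinalsSketch {
        public static void main(String[] args) {
            // A per-segment ord -> global ord mapping; built once per top-level
            // reader (the job InternalGlobalOrdinalsBuilder does in this commit).
            String[] segmentTerms = {"parent-a", "parent-b"}; // sorted terms of one segment
            long[] segmentToGlobal = {3, 7};                  // their index-wide ordinals

            // Collect phase: a child doc whose parent has segment ord 1 sets one bit.
            BitSet matchedParentOrds = new BitSet(16);        // sized like LongBitSet(maxOrd + 1)
            matchedParentOrds.set((int) segmentToGlobal[1]);

            // Match phase: a parent doc resolves its own ord and tests the bit;
            // unlike the old BytesRefHash path, no id bytes are hashed or compared.
            boolean match = matchedParentOrds.get((int) segmentToGlobal[1]);
            System.out.println(segmentTerms[1] + " matched: " + match);
        }
    }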
src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java
@@ -20,45 +20,33 @@ package org.elasticsearch.index.fielddata.ordinals;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LongValues;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.fielddata.*;
-import org.elasticsearch.search.MultiValueMode;
-import org.elasticsearch.index.fielddata.ordinals.InternalGlobalOrdinalsBuilder.OrdinalMappingSource;
-import org.elasticsearch.index.fielddata.plain.AtomicFieldDataWithOrdinalsTermsEnum;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.RamUsage;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.search.MultiValueMode;

 /**
- * {@link IndexFieldData} impl based on global ordinals.
+ * {@link IndexFieldData} base class for concrete global ordinals implementations.
  */
-public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexFieldData.WithOrdinals, RamUsage {
+public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexFieldData.WithOrdinals, RamUsage {

     private final FieldMapper.Names fieldNames;
     private final FieldDataType fieldDataType;
-    private final Atomic[] atomicReaders;
     private final long memorySizeInBytes;

-    public GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicFieldData.WithOrdinals[] segmentAfd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource[] segmentOrdToGlobalOrds, long memorySizeInBytes) {
+    protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) {
         super(index, settings);
         this.fieldNames = fieldNames;
         this.fieldDataType = fieldDataType;
-        this.atomicReaders = new Atomic[segmentAfd.length];
-        for (int i = 0; i < segmentAfd.length; i++) {
-            atomicReaders[i] = new Atomic(segmentAfd[i], globalOrdToFirstSegment, globalOrdToFirstSegmentDelta, segmentOrdToGlobalOrds[i]);
-        }
         this.memorySizeInBytes = memorySizeInBytes;
     }

-    @Override
-    public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
-        return atomicReaders[context.ord];
-    }
-
     @Override
     public AtomicFieldData.WithOrdinals loadDirect(AtomicReaderContext context) throws Exception {
         return load(context);
@@ -109,86 +97,4 @@ public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexFieldData.WithOrdinals, RamUsage {
         return memorySizeInBytes;
     }

-    private final class Atomic implements AtomicFieldData.WithOrdinals {
-
-        private final AtomicFieldData.WithOrdinals afd;
-        private final OrdinalMappingSource segmentOrdToGlobalOrdLookup;
-        private final LongValues globalOrdToFirstSegment;
-        private final LongValues globalOrdToFirstSegmentDelta;
-
-        private Atomic(WithOrdinals afd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource segmentOrdToGlobalOrdLookup) {
-            this.afd = afd;
-            this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
-            this.globalOrdToFirstSegment = globalOrdToFirstSegment;
-            this.globalOrdToFirstSegmentDelta = globalOrdToFirstSegmentDelta;
-        }
-
-        @Override
-        public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
-            BytesValues.WithOrdinals values = afd.getBytesValues(false);
-            Ordinals.Docs segmentOrdinals = values.ordinals();
-            final Ordinals.Docs globalOrdinals;
-            if (segmentOrdToGlobalOrdLookup != null) {
-                globalOrdinals = segmentOrdToGlobalOrdLookup.globalOrdinals(segmentOrdinals);
-            } else {
-                globalOrdinals = segmentOrdinals;
-            }
-            final BytesValues.WithOrdinals[] bytesValues = new BytesValues.WithOrdinals[atomicReaders.length];
-            for (int i = 0; i < bytesValues.length; i++) {
-                bytesValues[i] = atomicReaders[i].afd.getBytesValues(false);
-            }
-            return new BytesValues.WithOrdinals(globalOrdinals) {
-
-                int readerIndex;
-
-                @Override
-                public BytesRef getValueByOrd(long globalOrd) {
-                    final long segmentOrd = globalOrd - globalOrdToFirstSegmentDelta.get(globalOrd);
-                    readerIndex = (int) globalOrdToFirstSegment.get(globalOrd);
-                    return bytesValues[readerIndex].getValueByOrd(segmentOrd);
-                }
-
-                @Override
-                public BytesRef copyShared() {
-                    return bytesValues[readerIndex].copyShared();
-                }
-
-                @Override
-                public int currentValueHash() {
-                    return bytesValues[readerIndex].currentValueHash();
-                }
-            };
-        }
-
-        @Override
-        public boolean isMultiValued() {
-            return afd.isMultiValued();
-        }
-
-        @Override
-        public long getNumberUniqueValues() {
-            return afd.getNumberUniqueValues();
-        }
-
-        @Override
-        public long getMemorySizeInBytes() {
-            return afd.getMemorySizeInBytes();
-        }
-
-        @Override
-        public ScriptDocValues getScriptValues() {
-            throw new UnsupportedOperationException("Script values not supported on global ordinals");
-        }
-
-        @Override
-        public TermsEnum getTermsEnum() {
-            return new AtomicFieldDataWithOrdinalsTermsEnum(this);
-        }
-
-        @Override
-        public void close() {
-        }
-    }
 }
src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsBuilder.java
@@ -112,7 +112,7 @@ public class InternalGlobalOrdinalsBuilder extends AbstractIndexComponent implements GlobalOrdinalsBuilder {
                     (System.currentTimeMillis() - startTime)
             );
         }
-        return new GlobalOrdinalsIndexFieldData(indexFieldData.index(), settings, indexFieldData.getFieldNames(),
+        return new InternalGlobalOrdinalsIndexFieldData(indexFieldData.index(), settings, indexFieldData.getFieldNames(),
                 fieldDataType, withOrdinals, globalOrdToFirstSegment, globalOrdToFirstSegmentDelta,
                 segmentOrdToGlobalOrdLookups, memorySizeInBytes
         );
src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java (new file)
@@ -0,0 +1,137 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.index.fielddata.ordinals;
+
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongValues;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.index.fielddata.ordinals.InternalGlobalOrdinalsBuilder.OrdinalMappingSource;
+import org.elasticsearch.index.fielddata.plain.AtomicFieldDataWithOrdinalsTermsEnum;
+import org.elasticsearch.index.mapper.FieldMapper;
+
+/**
+ * {@link org.elasticsearch.index.fielddata.IndexFieldData} impl based on global ordinals.
+ */
+final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData {
+
+    private final Atomic[] atomicReaders;
+
+    InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicFieldData.WithOrdinals[] segmentAfd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource[] segmentOrdToGlobalOrds, long memorySizeInBytes) {
+        super(index, settings, fieldNames, fieldDataType, memorySizeInBytes);
+        this.atomicReaders = new Atomic[segmentAfd.length];
+        for (int i = 0; i < segmentAfd.length; i++) {
+            atomicReaders[i] = new Atomic(segmentAfd[i], globalOrdToFirstSegment, globalOrdToFirstSegmentDelta, segmentOrdToGlobalOrds[i]);
+        }
+    }
+
+    @Override
+    public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
+        return atomicReaders[context.ord];
+    }
+
+    private final class Atomic implements AtomicFieldData.WithOrdinals {
+
+        private final WithOrdinals afd;
+        private final OrdinalMappingSource segmentOrdToGlobalOrdLookup;
+        private final LongValues globalOrdToFirstSegment;
+        private final LongValues globalOrdToFirstSegmentDelta;
+
+        private Atomic(WithOrdinals afd, LongValues globalOrdToFirstSegment, LongValues globalOrdToFirstSegmentDelta, OrdinalMappingSource segmentOrdToGlobalOrdLookup) {
+            this.afd = afd;
+            this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
+            this.globalOrdToFirstSegment = globalOrdToFirstSegment;
+            this.globalOrdToFirstSegmentDelta = globalOrdToFirstSegmentDelta;
+        }
+
+        @Override
+        public BytesValues.WithOrdinals getBytesValues(boolean needsHashes) {
+            BytesValues.WithOrdinals values = afd.getBytesValues(false);
+            Ordinals.Docs segmentOrdinals = values.ordinals();
+            final Ordinals.Docs globalOrdinals;
+            if (segmentOrdToGlobalOrdLookup != null) {
+                globalOrdinals = segmentOrdToGlobalOrdLookup.globalOrdinals(segmentOrdinals);
+            } else {
+                globalOrdinals = segmentOrdinals;
+            }
+            final BytesValues.WithOrdinals[] bytesValues = new BytesValues.WithOrdinals[atomicReaders.length];
+            for (int i = 0; i < bytesValues.length; i++) {
+                bytesValues[i] = atomicReaders[i].afd.getBytesValues(false);
+            }
+            return new BytesValues.WithOrdinals(globalOrdinals) {
+
+                int readerIndex;
+
+                @Override
+                public BytesRef getValueByOrd(long globalOrd) {
+                    final long segmentOrd = globalOrd - globalOrdToFirstSegmentDelta.get(globalOrd);
+                    readerIndex = (int) globalOrdToFirstSegment.get(globalOrd);
+                    return bytesValues[readerIndex].getValueByOrd(segmentOrd);
+                }
+
+                @Override
+                public BytesRef copyShared() {
+                    return bytesValues[readerIndex].copyShared();
+                }
+
+                @Override
+                public int currentValueHash() {
+                    return bytesValues[readerIndex].currentValueHash();
+                }
+            };
+        }
+
+        @Override
+        public boolean isMultiValued() {
+            return afd.isMultiValued();
+        }
+
+        @Override
+        public long getNumberUniqueValues() {
+            return afd.getNumberUniqueValues();
+        }
+
+        @Override
+        public long getMemorySizeInBytes() {
+            return afd.getMemorySizeInBytes();
+        }
+
+        @Override
+        public ScriptDocValues getScriptValues() {
+            throw new UnsupportedOperationException("Script values not supported on global ordinals");
+        }
+
+        @Override
+        public TermsEnum getTermsEnum() {
+            return new AtomicFieldDataWithOrdinalsTermsEnum(this);
+        }
+
+        @Override
+        public void close() {
+        }
+    }
+}
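Aside (editor's note, not in the diff): the getValueByOrd arithmetic in the new Atomic class resolves a global ordinal back to the first segment that holds the term. A toy walk-through with invented numbers, assuming segment 0 holds terms [a, c] and segment 1 holds [b, d], so the global term order is a, b, c, d:

    public class GlobalOrdDecodeSketch {
        public static void main(String[] args) {
            // Invented values; the real class reads packed LongValues, not plain arrays.
            long[] globalOrdToFirstSegment      = {0, 1, 0, 1}; // segment that first holds each term
            long[] globalOrdToFirstSegmentDelta = {0, 1, 1, 2}; // globalOrd - delta == segment-local ord

            long globalOrd = 3; // term "d"
            int readerIndex = (int) globalOrdToFirstSegment[(int) globalOrd];            // -> segment 1
            long segmentOrd = globalOrd - globalOrdToFirstSegmentDelta[(int) globalOrd]; // -> ord 1
            // the term bytes then come from bytesValues[readerIndex].getValueByOrd(segmentOrd)
            System.out.println("global ord " + globalOrd + " -> segment " + readerIndex + ", ord " + segmentOrd);
        }
    }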
src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildAtomicFieldData.java
@@ -122,6 +122,10 @@ public class ParentChildAtomicFieldData implements AtomicFieldData {
         }
     }

+    public WithOrdinals getAtomicFieldData(String type) {
+        return typeToIds.get(type);
+    }
+
     @Override
     public ScriptDocValues getScriptValues() {
         return new ScriptDocValues.Strings(getBytesValues(false));
src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java
@@ -25,6 +25,8 @@ import org.apache.lucene.index.*;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchIllegalStateException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.breaker.MemoryCircuitBreaker;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
@@ -32,8 +34,8 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.*;
 import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
-import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
+import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
 import org.elasticsearch.index.mapper.DocumentMapper;
@@ -44,6 +46,7 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.indices.fielddata.breaker.CircuitBreakerService;
+import org.elasticsearch.search.MultiValueMode;

 import java.io.IOException;
 import java.util.NavigableSet;
@@ -57,6 +60,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChildAtomicFieldData>

     private final NavigableSet<BytesRef> parentTypes;
     private final CircuitBreakerService breakerService;
+    private final GlobalOrdinalsBuilder globalOrdinalsBuilder;

     // If child type (a type with _parent field) is added or removed, we want to make sure modifications don't happen
     // while loading.
@@ -64,10 +68,11 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChildAtomicFieldData>

     public ParentChildIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames,
                                      FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
-                                     CircuitBreakerService breakerService) {
+                                     CircuitBreakerService breakerService, GlobalOrdinalsBuilder globalOrdinalsBuilder) {
         super(index, indexSettings, fieldNames, fieldDataType, cache);
         parentTypes = new TreeSet<>(BytesRef.getUTF8SortedAsUnicodeComparator());
         this.breakerService = breakerService;
+        this.globalOrdinalsBuilder = globalOrdinalsBuilder;
         for (DocumentMapper documentMapper : mapperService) {
             beforeCreate(documentMapper);
         }
@@ -155,6 +160,12 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChildAtomicFieldData>
         }
     }

+    public WithOrdinals getGlobalParentChild(String type, IndexReader indexReader) {
+        ParentTypesGlobalOrdinalsLoading loading = new ParentTypesGlobalOrdinalsLoading();
+        ParentChildGlobalOrdinalsIndexFieldData holder = (ParentChildGlobalOrdinalsIndexFieldData) loading.loadGlobal(indexReader);
+        return holder.type(type);
+    }
+
     @Override
     public void beforeCreate(DocumentMapper mapper) {
         synchronized (lock) {
@@ -198,7 +209,8 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChildAtomicFieldData>
         public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper<?> mapper,
                                        IndexFieldDataCache cache, CircuitBreakerService breakerService,
                                        MapperService mapperService, GlobalOrdinalsBuilder globalOrdinalBuilder) {
-            return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, mapperService, breakerService);
+            return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache,
+                    mapperService, breakerService, globalOrdinalBuilder);
         }
     }
@@ -251,4 +263,144 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<ParentChildAtomicFieldData>
         }
     }

+    private class ParentTypesGlobalOrdinalsLoading implements WithOrdinals {
+
+        public ParentTypesGlobalOrdinalsLoading() {
+        }
+
+        @Override
+        public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
+            throw new ElasticsearchIllegalStateException("Shouldn't be invoked");
+        }
+
+        @Override
+        public AtomicFieldData.WithOrdinals loadDirect(AtomicReaderContext context) {
+            throw new ElasticsearchIllegalStateException("Shouldn't be invoked");
+        }
+
+        @Override
+        public WithOrdinals loadGlobal(IndexReader indexReader) {
+            if (indexReader.leaves().size() <= 1) {
+                // ordinals are already global
+                ImmutableOpenMap.Builder<String, WithOrdinals> globalIfdPerType = ImmutableOpenMap.builder();
+                for (BytesRef parentType : parentTypes) {
+                    PerType perType = new PerType(parentType.utf8ToString());
+                    globalIfdPerType.put(perType.type, perType);
+                }
+                return new ParentChildGlobalOrdinalsIndexFieldData(globalIfdPerType.build(), 0);
+            }
+
+            try {
+                return cache.load(indexReader, this);
+            } catch (Throwable e) {
+                if (e instanceof ElasticsearchException) {
+                    throw (ElasticsearchException) e;
+                } else {
+                    throw new ElasticsearchException(e.getMessage(), e);
+                }
+            }
+        }
+
+        @Override
+        public WithOrdinals localGlobalDirect(IndexReader indexReader) throws Exception {
+            ImmutableOpenMap.Builder<String, WithOrdinals> globalIfdPerType = ImmutableOpenMap.builder();
+            long memorySizeInBytes = 0;
+            for (BytesRef parentType : parentTypes) {
+                PerType perType = new PerType(parentType.utf8ToString());
+                GlobalOrdinalsIndexFieldData globalIfd = (GlobalOrdinalsIndexFieldData) globalOrdinalsBuilder.build(indexReader, perType, indexSettings, breakerService);
+                globalIfdPerType.put(perType.type, globalIfd);
+                memorySizeInBytes += globalIfd.getMemorySizeInBytes();
+            }
+            return new ParentChildGlobalOrdinalsIndexFieldData(globalIfdPerType.build(), memorySizeInBytes);
+        }
+
+        @Override
+        public FieldMapper.Names getFieldNames() {
+            return ParentChildIndexFieldData.this.getFieldNames();
+        }
+
+        @Override
+        public FieldDataType getFieldDataType() {
+            return ParentChildIndexFieldData.this.getFieldDataType();
+        }
+
+        @Override
+        public boolean valuesOrdered() {
+            return ParentChildIndexFieldData.this.valuesOrdered();
+        }
+
+        @Override
+        public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode) {
+            throw new UnsupportedOperationException("Sort not supported on PerParentTypeGlobalOrdinals...");
+        }
+
+        @Override
+        public void clear() {
+        }
+
+        @Override
+        public void clear(IndexReader reader) {
+        }
+
+        @Override
+        public Index index() {
+            return ParentChildIndexFieldData.this.index();
+        }
+
+        private final class PerType extends ParentTypesGlobalOrdinalsLoading {
+
+            private final String type;
+
+            public PerType(String type) {
+                this.type = type;
+            }
+
+            @Override
+            public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
+                return loadDirect(context);
+            }
+
+            @Override
+            public AtomicFieldData.WithOrdinals loadDirect(AtomicReaderContext context) {
+                ParentChildAtomicFieldData parentChildAtomicFieldData = ParentChildIndexFieldData.this.load(context);
+                AtomicFieldData.WithOrdinals typeAfd = parentChildAtomicFieldData.getAtomicFieldData(type);
+                if (typeAfd != null) {
+                    return typeAfd;
+                } else {
+                    return PagedBytesAtomicFieldData.empty();
+                }
+            }
+
+            @Override
+            public WithOrdinals loadGlobal(IndexReader indexReader) {
+                return this;
+            }
+
+            @Override
+            public WithOrdinals localGlobalDirect(IndexReader indexReader) throws Exception {
+                return this;
+            }
+        }
+    }
+
+    // Effectively this is a cache key for in the field data cache
+    private final class ParentChildGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData {
+
+        private final ImmutableOpenMap<String, WithOrdinals> typeGlobalOrdinals;
+
+        private ParentChildGlobalOrdinalsIndexFieldData(ImmutableOpenMap<String, WithOrdinals> typeGlobalOrdinals, long memorySizeInBytes) {
+            super(ParentChildIndexFieldData.this.index(), ParentChildIndexFieldData.this.indexSettings, ParentChildIndexFieldData.this.getFieldNames(), ParentChildIndexFieldData.this.getFieldDataType(), memorySizeInBytes);
+            this.typeGlobalOrdinals = typeGlobalOrdinals;
+        }
+
+        @Override
+        public AtomicFieldData.WithOrdinals load(AtomicReaderContext context) {
+            throw new ElasticsearchIllegalStateException("Can't use directly");
+        }
+
+        public WithOrdinals type(String type) {
+            return typeGlobalOrdinals.get(type);
+        }
+    }

 }
src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java
@@ -22,28 +22,22 @@ package org.elasticsearch.index.search.child;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.FixedBitSet;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.lease.Releasables;
+import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.util.BytesRefHash;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
-import org.elasticsearch.index.mapper.Uid;
-import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
+import java.util.List;
 import java.util.Set;

 /**
@@ -99,58 +93,104 @@ public class ChildrenConstantScoreQuery extends Query {

     @Override
     public Weight createWeight(IndexSearcher searcher) throws IOException {
-        final SearchContext searchContext = SearchContext.current();
-        final BytesRefHash parentIds = new BytesRefHash(512, searchContext.bigArrays());
-        boolean releaseParentIds = true;
-        try {
-            final ParentIdCollector collector = new ParentIdCollector(parentType, parentChildIndexFieldData, parentIds);
-            assert rewrittenChildQuery != null;
-            assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
-            final Query childQuery = rewrittenChildQuery;
-            IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
-            indexSearcher.setSimilarity(searcher.getSimilarity());
-            indexSearcher.search(childQuery, collector);
-
-            long remaining = parentIds.size();
-            if (remaining == 0) {
-                return Queries.newMatchNoDocsQuery().createWeight(searcher);
-            }
-            Filter shortCircuitFilter = null;
-            if (remaining == 1) {
-                BytesRef id = parentIds.get(0, new BytesRef());
-                shortCircuitFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
-            } else if (remaining <= shortCircuitParentDocSet) {
-                shortCircuitFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
-            }
-            final ParentWeight parentWeight = new ParentWeight(parentFilter, shortCircuitFilter, parentIds);
-            searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
-            releaseParentIds = false;
-            return parentWeight;
-        } finally {
-            if (releaseParentIds) {
-                Releasables.close(parentIds);
-            }
-        }
+        SearchContext sc = SearchContext.current();
+        ParentChildIndexFieldData.WithOrdinals globalIfd = parentChildIndexFieldData.getGlobalParentChild(
+                parentType, searcher.getIndexReader()
+        );
+        assert rewrittenChildQuery != null;
+        assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
+
+        final long maxOrd;
+        List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
+        if (globalIfd == null || leaves.isEmpty()) {
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        } else {
+            AtomicFieldData.WithOrdinals afd = globalIfd.load(leaves.get(0));
+            BytesValues.WithOrdinals globalValues = afd.getBytesValues(false);
+            Ordinals.Docs globalOrdinals = globalValues.ordinals();
+            maxOrd = globalOrdinals.getMaxOrd();
+        }
+
+        if (maxOrd == 0) {
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        }
+
+        Query childQuery = rewrittenChildQuery;
+        IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
+        indexSearcher.setSimilarity(searcher.getSimilarity());
+        ParentOrdCollector collector = new ParentOrdCollector(globalIfd, maxOrd);
+        indexSearcher.search(childQuery, collector);
+
+        final long remaining = collector.foundParents();
+        if (remaining == 0) {
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        }
+
+        Filter shortCircuitFilter = null;
+        if (remaining <= shortCircuitParentDocSet) {
+            shortCircuitFilter = ParentIdsFilter.createShortCircuitFilter(
+                    nonNestedDocsFilter, sc, parentType, collector.values, collector.parentOrds, remaining
+            );
+        }
+        return new ParentWeight(parentFilter, globalIfd, shortCircuitFilter, collector, remaining);
     }

-    private final class ParentWeight extends Weight implements Releasable {
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || obj.getClass() != this.getClass()) {
+            return false;
+        }
+        ChildrenConstantScoreQuery that = (ChildrenConstantScoreQuery) obj;
+        if (!originalChildQuery.equals(that.originalChildQuery)) {
+            return false;
+        }
+        if (!childType.equals(that.childType)) {
+            return false;
+        }
+        if (shortCircuitParentDocSet != that.shortCircuitParentDocSet) {
+            return false;
+        }
+        if (getBoost() != that.getBoost()) {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = originalChildQuery.hashCode();
+        result = 31 * result + childType.hashCode();
+        result = 31 * result + shortCircuitParentDocSet;
+        result = 31 * result + Float.floatToIntBits(getBoost());
+        return result;
+    }
+
+    @Override
+    public String toString(String field) {
+        return "child_filter[" + childType + "/" + parentType + "](" + originalChildQuery + ')';
+    }
+
+    private final class ParentWeight extends Weight {

         private final Filter parentFilter;
         private final Filter shortCircuitFilter;
-        private final BytesRefHash parentIds;
+        private final ParentOrdCollector collector;
+        private final IndexFieldData.WithOrdinals globalIfd;

         private long remaining;
         private float queryNorm;
         private float queryWeight;

-        public ParentWeight(Filter parentFilter, Filter shortCircuitFilter, BytesRefHash parentIds) {
+        public ParentWeight(Filter parentFilter, IndexFieldData.WithOrdinals globalIfd, Filter shortCircuitFilter, ParentOrdCollector collector, long remaining) {
             this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
+            this.globalIfd = globalIfd;
             this.shortCircuitFilter = shortCircuitFilter;
-            this.parentIds = parentIds;
-            this.remaining = parentIds.size();
+            this.collector = collector;
+            this.remaining = remaining;
         }

         @Override
@@ -194,40 +234,81 @@ public class ChildrenConstantScoreQuery extends Query {

             DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
             if (!DocIdSets.isEmpty(parentDocIdSet)) {
-                BytesValues bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
                 // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
                 // count down (short circuit) logic will then work as expected.
                 parentDocIdSet = BitsFilteredDocIdSet.wrap(parentDocIdSet, context.reader().getLiveDocs());
-                if (bytesValues != null) {
-                    DocIdSetIterator innerIterator = parentDocIdSet.iterator();
-                    if (innerIterator != null) {
-                        ParentDocIdIterator parentDocIdIterator = new ParentDocIdIterator(innerIterator, parentIds, bytesValues);
-                        return ConstantScorer.create(parentDocIdIterator, this, queryWeight);
+                DocIdSetIterator innerIterator = parentDocIdSet.iterator();
+                if (innerIterator != null) {
+                    LongBitSet parentOrds = collector.parentOrds;
+                    BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues(false);
+                    if (globalValues != null) {
+                        Ordinals.Docs globalOrdinals = globalValues.ordinals();
+                        DocIdSetIterator parentIdIterator = new ParentOrdIterator(innerIterator, parentOrds, globalOrdinals, this);
+                        return ConstantScorer.create(parentIdIterator, this, queryWeight);
                     }
                 }
             }
             return null;
         }
-
-        @Override
-        public void close() throws ElasticsearchException {
-            Releasables.close(parentIds);
-        }
     }

-    private final class ParentDocIdIterator extends FilteredDocIdSetIterator {
+    private final static class ParentOrdCollector extends NoopCollector {

-        private final BytesRefHash parentIds;
-        private final BytesValues values;
+        private final LongBitSet parentOrds;
+        private final ParentChildIndexFieldData.WithOrdinals indexFieldData;

-        private ParentDocIdIterator(DocIdSetIterator innerIterator, BytesRefHash parentIds, BytesValues values) {
+        private BytesValues.WithOrdinals values;
+        private Ordinals.Docs globalOrdinals;
+
+        private ParentOrdCollector(ParentChildIndexFieldData.WithOrdinals indexFieldData, long maxOrd) {
+            // TODO: look into reusing LongBitSet#bits array
+            this.parentOrds = new LongBitSet(maxOrd + 1);
+            this.indexFieldData = indexFieldData;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+            if (globalOrdinals != null) {
+                long globalOrdinal = globalOrdinals.getOrd(doc);
+                if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
+                    parentOrds.set(globalOrdinal);
+                }
+            }
+        }
+
+        @Override
+        public void setNextReader(AtomicReaderContext context) throws IOException {
+            values = indexFieldData.load(context).getBytesValues(false);
+            if (values != null) {
+                globalOrdinals = values.ordinals();
+            } else {
+                globalOrdinals = null;
+            }
+        }
+
+        long foundParents() {
+            return parentOrds.cardinality();
+        }
+    }
+
+    private final static class ParentOrdIterator extends FilteredDocIdSetIterator {
+
+        private final LongBitSet parentOrds;
+        private final Ordinals.Docs ordinals;
+        private final ParentWeight parentWeight;
+
+        private ParentOrdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, Ordinals.Docs ordinals, ParentWeight parentWeight) {
             super(innerIterator);
-            this.parentIds = parentIds;
-            this.values = values;
+            this.parentOrds = parentOrds;
+            this.ordinals = ordinals;
+            this.parentWeight = parentWeight;
         }

         @Override
         protected boolean match(int doc) {
-            if (remaining == 0) {
+            if (parentWeight.remaining == 0) {
                 try {
                     advance(DocIdSetIterator.NO_MORE_DOCS);
                 } catch (IOException e) {
@@ -236,105 +317,16 @@ public class ChildrenConstantScoreQuery extends Query {
                 return false;
             }

-            values.setDocument(doc);
-            BytesRef parentId = values.nextValue();
-            int hash = values.currentValueHash();
-            boolean match = parentIds.find(parentId, hash) >= 0;
-            if (match) {
-                remaining--;
-            }
-            return match;
-        }
-    }
-
-    private final static class ParentIdCollector extends NoopCollector {
-
-        private final BytesRefHash parentIds;
-        private final String parentType;
-        private final ParentChildIndexFieldData indexFieldData;
-
-        protected BytesValues.WithOrdinals values;
-        private Ordinals.Docs ordinals;
-
-        // This remembers what ordinals have already been seen in the current segment
-        // and prevents from fetch the actual id from FD and checking if it exists in parentIds
-        private FixedBitSet seenOrdinals;
-
-        protected ParentIdCollector(String parentType, ParentChildIndexFieldData indexFieldData, BytesRefHash parentIds) {
-            this.parentType = parentType;
-            this.indexFieldData = indexFieldData;
-            this.parentIds = parentIds;
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            if (values != null) {
-                int ord = (int) ordinals.getOrd(doc);
-                if (!seenOrdinals.get(ord)) {
-                    final BytesRef bytes = values.getValueByOrd(ord);
-                    final int hash = values.currentValueHash();
-                    parentIds.add(bytes, hash);
-                    seenOrdinals.set(ord);
-                }
-            }
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getBytesValues(parentType);
-            if (values != null) {
-                ordinals = values.ordinals();
-                final int maxOrd = (int) ordinals.getMaxOrd();
-                if (seenOrdinals == null || seenOrdinals.length() < maxOrd) {
-                    seenOrdinals = new FixedBitSet(maxOrd);
-                } else {
-                    seenOrdinals.clear(0, maxOrd);
-                }
-            }
-        }
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || obj.getClass() != this.getClass()) {
-            return false;
-        }
-        ChildrenConstantScoreQuery that = (ChildrenConstantScoreQuery) obj;
-        if (!originalChildQuery.equals(that.originalChildQuery)) {
-            return false;
-        }
-        if (!childType.equals(that.childType)) {
-            return false;
-        }
-        if (shortCircuitParentDocSet != that.shortCircuitParentDocSet) {
-            return false;
-        }
-        if (getBoost() != that.getBoost()) {
-            return false;
-        }
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = originalChildQuery.hashCode();
-        result = 31 * result + childType.hashCode();
-        result = 31 * result + shortCircuitParentDocSet;
-        result = 31 * result + Float.floatToIntBits(getBoost());
-        return result;
-    }
-
-    @Override
-    public String toString(String field) {
-        StringBuilder sb = new StringBuilder();
-        sb.append("child_filter[").append(childType).append("/").append(parentType).append("](").append(originalChildQuery).append(')');
-        return sb.toString();
-    }
+            long parentOrd = ordinals.getOrd(doc);
+            if (parentOrd != Ordinals.MISSING_ORDINAL) {
+                boolean match = parentOrds.get(parentOrd);
+                if (match) {
+                    parentWeight.remaining--;
+                }
+                return match;
+            }
+            return false;
+        }
+    }
 }
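One more reading aid before ChildrenQuery (editor's note, not the commit's): both query classes now share a two-phase shape: run the child query once and mark each matching parent's global ordinal, then iterate candidate parent docs and accept those whose bit is set, counting down so iteration can short-circuit, as in ParentOrdIterator above. A compressed, hypothetical sketch with plain JDK types:

    import java.util.ArrayList;
    import java.util.BitSet;
    import java.util.List;
    import java.util.function.IntUnaryOperator;

    public class TwoPhaseParentMatchSketch {
        // docToGlobalOrd stands in for Ordinals.Docs#getOrd; -1 means "no parent".
        static List<Integer> matchParents(int[] childHits, int[] parentDocs,
                                          IntUnaryOperator docToGlobalOrd, int maxOrd) {
            BitSet parentOrds = new BitSet(maxOrd + 1);
            for (int childDoc : childHits) {                  // phase 1: collect
                int ord = docToGlobalOrd.applyAsInt(childDoc);
                if (ord >= 0) {
                    parentOrds.set(ord);
                }
            }
            long remaining = parentOrds.cardinality();        // like foundParents()
            List<Integer> matched = new ArrayList<>();
            for (int parentDoc : parentDocs) {                // phase 2: match
                if (remaining == 0) {
                    break;                                    // the countdown short circuit
                }
                int ord = docToGlobalOrd.applyAsInt(parentDoc);
                if (ord >= 0 && parentOrds.get(ord)) {
                    matched.add(parentDoc);
                    remaining--;
                }
            }
            return matched;
        }
    }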
src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java
@@ -21,31 +21,28 @@ package org.elasticsearch.index.search.child;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.queries.TermFilter;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.common.lucene.search.AndFilter;
 import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.util.*;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.FloatArray;
+import org.elasticsearch.common.util.IntArray;
+import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
-import org.elasticsearch.index.mapper.Uid;
-import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
 import java.util.Set;

 /**
@@ -59,7 +56,7 @@ import java.util.Set;
  */
 public class ChildrenQuery extends Query {

-    private final ParentChildIndexFieldData parentChildIndexFieldData;
+    private final ParentChildIndexFieldData ifd;
     private final String parentType;
     private final String childType;
     private final Filter parentFilter;
@@ -71,8 +68,8 @@ public class ChildrenQuery extends Query {
     private Query rewrittenChildQuery;
     private IndexReader rewriteIndexReader;

-    public ChildrenQuery(ParentChildIndexFieldData parentChildIndexFieldData, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet, Filter nonNestedDocsFilter) {
-        this.parentChildIndexFieldData = parentChildIndexFieldData;
+    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int shortCircuitParentDocSet, Filter nonNestedDocsFilter) {
+        this.ifd = ifd;
         this.parentType = parentType;
         this.childType = childType;
         this.parentFilter = parentFilter;
@@ -114,10 +111,8 @@ public class ChildrenQuery extends Query {

     @Override
     public String toString(String field) {
-        StringBuilder sb = new StringBuilder();
-        sb.append("ChildrenQuery[").append(childType).append("/").append(parentType).append("](").append(originalChildQuery
-                .toString(field)).append(')').append(ToStringUtils.boost(getBoost()));
-        return sb.toString();
+        return "ChildrenQuery[" + childType + "/" + parentType + "](" + originalChildQuery
+                .toString(field) + ')' + ToStringUtils.boost(getBoost());
     }

     @Override
@@ -147,100 +142,72 @@ public class ChildrenQuery extends Query {

     @Override
     public Weight createWeight(IndexSearcher searcher) throws IOException {
-        SearchContext searchContext = SearchContext.current();
+        SearchContext sc = SearchContext.current();
         assert rewrittenChildQuery != null;
         assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
         final Query childQuery = rewrittenChildQuery;
+
+        IndexFieldData.WithOrdinals globalIfd = ifd.getGlobalParentChild(parentType, searcher.getIndexReader());
+        if (globalIfd == null) {
+            // No docs of the specified type don't exist on this shard
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        }
         IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
         indexSearcher.setSimilarity(searcher.getSimilarity());

-        final BytesRefHash parentIds;
-        final FloatArray scores;
-        final IntArray occurrences;
-        switch (scoreType) {
-            case MAX:
-                MaxCollector maxCollector = new MaxCollector(parentChildIndexFieldData, parentType, searchContext);
-                try {
-                    indexSearcher.search(childQuery, maxCollector);
-                    parentIds = maxCollector.parentIds;
-                    scores = maxCollector.scores;
-                    occurrences = null;
-                } finally {
-                    Releasables.close(maxCollector.parentIdsIndex);
-                }
-                break;
-            case SUM:
-                SumCollector sumCollector = new SumCollector(parentChildIndexFieldData, parentType, searchContext);
-                try {
-                    indexSearcher.search(childQuery, sumCollector);
-                    parentIds = sumCollector.parentIds;
-                    scores = sumCollector.scores;
-                    occurrences = null;
-                } finally {
-                    Releasables.close(sumCollector.parentIdsIndex);
-                }
-                break;
-            case AVG:
-                AvgCollector avgCollector = new AvgCollector(parentChildIndexFieldData, parentType, searchContext);
-                try {
-                    indexSearcher.search(childQuery, avgCollector);
-                    parentIds = avgCollector.parentIds;
-                    scores = avgCollector.scores;
-                    occurrences = avgCollector.occurrences;
-                } finally {
-                    Releasables.close(avgCollector.parentIdsIndex);
-                }
-                break;
-            default:
-                throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
-        }
-
-        int size = (int) parentIds.size();
-        if (size == 0) {
-            Releasables.close(parentIds, scores, occurrences);
-            return Queries.newMatchNoDocsQuery().createWeight(searcher);
-        }
-
-        final Filter parentFilter;
-        if (size == 1) {
-            BytesRef id = parentIds.get(0, new BytesRef());
-            if (nonNestedDocsFilter != null) {
-                List<Filter> filters = Arrays.asList(
-                        new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
-                        nonNestedDocsFilter
-                );
-                parentFilter = new AndFilter(filters);
-            } else {
-                parentFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
-            }
-        } else if (size <= shortCircuitParentDocSet) {
-            parentFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
+        boolean abort = true;
+        long numFoundParents;
+        ParentOrdAndScoreCollector collector = null;
+        try {
+            switch (scoreType) {
+                case MAX:
+                    collector = new MaxCollector(globalIfd, sc);
+                    break;
+                case SUM:
+                    collector = new SumCollector(globalIfd, sc);
+                    break;
+                case AVG:
+                    collector = new AvgCollector(globalIfd, sc);
+                    break;
+                default:
+                    throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
+            }
+            indexSearcher.search(childQuery, collector);
+            numFoundParents = collector.foundParents();
+            if (numFoundParents == 0) {
+                return Queries.newMatchNoDocsQuery().createWeight(searcher);
+            }
+            abort = false;
+        } finally {
+            if (abort) {
+                Releasables.close(collector);
+            }
+        }
+        sc.addReleasable(collector, Lifetime.COLLECTION);
+        final Filter parentFilter;
+        if (numFoundParents <= shortCircuitParentDocSet) {
+            parentFilter = ParentIdsFilter.createShortCircuitFilter(
+                    nonNestedDocsFilter, sc, parentType, collector.values, collector.parentIdxs, numFoundParents
+            );
         } else {
             parentFilter = new ApplyAcceptedDocsFilter(this.parentFilter);
         }
-        ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, size, parentIds, scores, occurrences);
-        searchContext.addReleasable(parentWeight, Lifetime.COLLECTION);
-        return parentWeight;
+        return new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, numFoundParents, collector);
     }

-    private final class ParentWeight extends Weight implements Releasable {
+    private final class ParentWeight extends Weight {

         private final Weight childWeight;
         private final Filter parentFilter;
-        private final BytesRefHash parentIds;
-        private final FloatArray scores;
-        private final IntArray occurrences;
+        private final ParentOrdAndScoreCollector collector;

-        private int remaining;
+        private long remaining;

-        private ParentWeight(Weight childWeight, Filter parentFilter, int remaining, BytesRefHash parentIds, FloatArray scores, IntArray occurrences) {
+        private ParentWeight(Weight childWeight, Filter parentFilter, long remaining, ParentOrdAndScoreCollector collector) {
             this.childWeight = childWeight;
             this.parentFilter = parentFilter;
             this.remaining = remaining;
-            this.parentIds = parentIds;
-            this.scores = scores;
-            this.occurrences = occurrences;
+            this.collector = collector;
         }

         @Override
@ -271,44 +238,169 @@ public class ChildrenQuery extends Query {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
BytesValues bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
|
// We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
|
||||||
|
// count down (short circuit) logic will then work as expected.
|
||||||
|
DocIdSetIterator parents = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();
|
||||||
|
BytesValues.WithOrdinals bytesValues = collector.globalIfd.load(context).getBytesValues(false);
|
||||||
if (bytesValues == null) {
|
if (bytesValues == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
|
|
||||||
// count down (short circuit) logic will then work as expected.
|
|
||||||
DocIdSetIterator parentsIterator = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();
|
|
||||||
switch (scoreType) {
|
switch (scoreType) {
|
||||||
case AVG:
|
case AVG:
|
||||||
return new AvgParentScorer(this, bytesValues, parentIds, scores, occurrences, parentsIterator);
|
return new AvgParentScorer(this, parents, collector, bytesValues.ordinals());
|
||||||
default:
|
default:
|
||||||
return new ParentScorer(this, bytesValues, parentIds, scores, parentsIterator);
|
return new ParentScorer(this, parents, collector, bytesValues.ordinals());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private abstract static class ParentOrdAndScoreCollector extends NoopCollector implements Releasable {
|
||||||
|
|
||||||
|
private final IndexFieldData.WithOrdinals globalIfd;
|
||||||
|
protected final LongHash parentIdxs;
|
||||||
|
protected final BigArrays bigArrays;
|
||||||
|
protected FloatArray scores;
|
||||||
|
protected final SearchContext searchContext;
|
||||||
|
|
||||||
|
protected Ordinals.Docs globalOrdinals;
|
||||||
|
protected BytesValues.WithOrdinals values;
|
||||||
|
protected Scorer scorer;
|
||||||
|
|
||||||
|
private ParentOrdAndScoreCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
|
||||||
|
this.globalIfd = globalIfd;
|
||||||
|
this.bigArrays = searchContext.bigArrays();
|
||||||
|
this.parentIdxs = new LongHash(512, bigArrays);
|
||||||
|
this.scores = bigArrays.newFloatArray(512, false);
|
||||||
|
this.searchContext = searchContext;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void collect(int doc) throws IOException {
|
||||||
|
if (globalOrdinals != null) {
|
||||||
|
final long globalOrdinal = globalOrdinals.getOrd(doc);
|
||||||
|
if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
|
||||||
|
long parentIdx = parentIdxs.add(globalOrdinal);
|
||||||
|
if (parentIdx >= 0) {
|
||||||
|
scores = bigArrays.grow(scores, parentIdx + 1);
|
||||||
|
scores.set(parentIdx, scorer.score());
|
||||||
|
} else {
|
||||||
|
parentIdx = -1 - parentIdx;
|
||||||
|
doScore(parentIdx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected void doScore(long index) throws IOException {
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setNextReader(AtomicReaderContext context) throws IOException {
|
||||||
|
values = globalIfd.load(context).getBytesValues(false);
|
||||||
|
if (values != null) {
|
||||||
|
globalOrdinals = values.ordinals();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public long foundParents() {
|
||||||
|
return parentIdxs.size();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void setScorer(Scorer scorer) throws IOException {
|
||||||
|
this.scorer = scorer;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void close() throws ElasticsearchException {
|
||||||
|
Releasables.close(parentIdxs, scores);
|
||||||
|
}
|
||||||
|
}
|
||||||
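The collect() implementation above leans on the LongHash.add contract: a non-negative return is the slot index of a newly seen key, while an already-present key comes back encoded as -1 - existingIndex, which the collector decodes before delegating to doScore(). A rough simulation of that contract with a plain java.util.HashMap (the real LongHash is an off-heap open-addressing table backed by BigArrays, so this is illustrative only):

import java.util.HashMap;
import java.util.Map;

public class LongHashContractSketch {
    private final Map<Long, Long> indexByKey = new HashMap<>();
    private long nextIndex = 0;

    // Mirrors the assumed LongHash.add contract: a fresh slot index for a new
    // key, or -1 - existingIndex when the key was already present.
    long add(long key) {
        Long existing = indexByKey.get(key);
        if (existing != null) {
            return -1 - existing;
        }
        indexByKey.put(key, nextIndex);
        return nextIndex++;
    }

    public static void main(String[] args) {
        LongHashContractSketch hash = new LongHashContractSketch();
        System.out.println(hash.add(42));   // 0  -> first sight, initialize the score slot
        long r = hash.add(42);              // -1 -> already seen, encoded slot
        System.out.println(r);              // -1
        System.out.println(-1 - r);         // 0  -> decoded slot, accumulate via doScore()
    }
}
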
|
|
||||||
|
private final static class SumCollector extends ParentOrdAndScoreCollector {
|
||||||
|
|
||||||
|
private SumCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
|
||||||
|
super(globalIfd, searchContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void doScore(long index) throws IOException {
|
||||||
|
scores.increment(index, scorer.score());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private final static class MaxCollector extends ParentOrdAndScoreCollector {
|
||||||
|
|
||||||
|
private MaxCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
|
||||||
|
super(globalIfd, searchContext);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected void doScore(long index) throws IOException {
|
||||||
|
float currentScore = scorer.score();
|
||||||
|
if (currentScore > scores.get(index)) {
|
||||||
|
scores.set(index, currentScore);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private final static class AvgCollector extends ParentOrdAndScoreCollector {
|
||||||
|
|
||||||
|
private IntArray occurrences;
|
||||||
|
|
||||||
|
AvgCollector(IndexFieldData.WithOrdinals globalIfd, SearchContext searchContext) {
|
||||||
|
super(globalIfd, searchContext);
|
||||||
|
this.occurrences = bigArrays.newIntArray(512, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void collect(int doc) throws IOException {
|
||||||
|
if (globalOrdinals != null) {
|
||||||
|
final long globalOrdinal = globalOrdinals.getOrd(doc);
|
||||||
|
if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
|
||||||
|
long parentIdx = parentIdxs.add(globalOrdinal);
|
||||||
|
if (parentIdx >= 0) {
|
||||||
|
scores = bigArrays.grow(scores, parentIdx + 1);
|
||||||
|
occurrences = bigArrays.grow(occurrences, parentIdx + 1);
|
||||||
|
scores.set(parentIdx, scorer.score());
|
||||||
|
occurrences.set(parentIdx, 1);
|
||||||
|
} else {
|
||||||
|
parentIdx = -1 - parentIdx;
|
||||||
|
scores.increment(parentIdx, scorer.score());
|
||||||
|
occurrences.increment(parentIdx, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void close() throws ElasticsearchException {
|
public void close() throws ElasticsearchException {
|
||||||
Releasables.close(parentIds, scores, occurrences);
|
Releasables.close(parentIdxs, scores, occurrences);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
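AvgCollector keeps two parallel BigArrays (scores and occurrences) and defers the division to the scorer, so each additional child hit costs two increments rather than a recompute. A compact sketch of that accumulate-then-divide scheme with ordinary Java arrays standing in for FloatArray and IntArray:

public class AvgScoreSketch {
    public static void main(String[] args) {
        // One slot per distinct parent, as in the collector's parallel arrays.
        float[] scores = new float[1];
        int[] occurrences = new int[1];

        float[] childScores = {1.0f, 3.0f, 5.0f};   // three children of one parent
        for (float s : childScores) {
            scores[0] += s;          // SUM-style accumulation
            occurrences[0] += 1;     // hit count per parent
        }

        // AvgParentScorer divides only when the parent doc is actually emitted.
        float avg = scores[0] / occurrences[0];
        System.out.println(avg);     // 3.0
    }
}
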
-    private class ParentScorer extends Scorer {
+    private static class ParentScorer extends Scorer {

-        final BytesRefHash parentIds;
+        final ParentWeight parentWeight;
+        final LongHash parentIds;
         final FloatArray scores;

-        final BytesValues bytesValues;
+        final Ordinals.Docs globalOrdinals;
         final DocIdSetIterator parentsIterator;

         int currentDocId = -1;
         float currentScore;

-        ParentScorer(Weight weight, BytesValues bytesValues, BytesRefHash parentIds, FloatArray scores, DocIdSetIterator parentsIterator) {
+        ParentScorer(ParentWeight parentWeight, DocIdSetIterator parentsIterator, ParentOrdAndScoreCollector collector, Ordinals.Docs globalOrdinals) {
-            super(weight);
+            super(parentWeight);
-            this.bytesValues = bytesValues;
+            this.parentWeight = parentWeight;
+            this.globalOrdinals = globalOrdinals;
             this.parentsIterator = parentsIterator;
-            this.parentIds = parentIds;
+            this.parentIds = collector.parentIdxs;
-            this.scores = scores;
+            this.scores = collector.scores;
         }

         @Override
@@ -330,7 +422,7 @@ public class ChildrenQuery extends Query {

         @Override
         public int nextDoc() throws IOException {
-            if (remaining == 0) {
+            if (parentWeight.remaining == 0) {
                 return currentDocId = NO_MORE_DOCS;
             }

@@ -340,11 +432,15 @@ public class ChildrenQuery extends Query {
                     return currentDocId;
                 }

-                bytesValues.setDocument(currentDocId);
+                final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
-                long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
+                if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
-                if (index != -1) {
+                    continue;
-                    currentScore = scores.get(index);
+                }
-                    remaining--;
+
+                final long parentIdx = parentIds.find(globalOrdinal);
+                if (parentIdx != -1) {
+                    currentScore = scores.get(parentIdx);
+                    parentWeight.remaining--;
                     return currentDocId;
                 }
             }

@@ -352,7 +448,7 @@ public class ChildrenQuery extends Query {

         @Override
         public int advance(int target) throws IOException {
-            if (remaining == 0) {
+            if (parentWeight.remaining == 0) {
                 return currentDocId = NO_MORE_DOCS;
             }

@@ -361,11 +457,15 @@ public class ChildrenQuery extends Query {
                 return currentDocId;
             }

-            bytesValues.setDocument(currentDocId);
+            final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
-            long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
+            if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
-            if (index != -1) {
+                return nextDoc();
-                currentScore = scores.get(index);
+            }
-                remaining--;
+
+            final long parentIdx = parentIds.find(globalOrdinal);
+            if (parentIdx != -1) {
+                currentScore = scores.get(parentIdx);
+                parentWeight.remaining--;
                 return currentDocId;
             } else {
                 return nextDoc();

@@ -378,18 +478,18 @@ public class ChildrenQuery extends Query {
         }
     }

-    private final class AvgParentScorer extends ParentScorer {
+    private static final class AvgParentScorer extends ParentScorer {

-        final IntArray occurrences;
+        private final IntArray occurrences;

-        AvgParentScorer(Weight weight, BytesValues values, BytesRefHash parentIds, FloatArray scores, IntArray occurrences, DocIdSetIterator parentsIterator) {
+        AvgParentScorer(ParentWeight weight, DocIdSetIterator parentsIterator, ParentOrdAndScoreCollector collector, Ordinals.Docs globalOrdinals) {
-            super(weight, values, parentIds, scores, parentsIterator);
+            super(weight, parentsIterator, collector, globalOrdinals);
-            this.occurrences = occurrences;
+            this.occurrences = ((AvgCollector) collector).occurrences;
         }

         @Override
         public int nextDoc() throws IOException {
-            if (remaining == 0) {
+            if (parentWeight.remaining == 0) {
                 return currentDocId = NO_MORE_DOCS;
             }

@@ -399,12 +499,16 @@ public class ChildrenQuery extends Query {
                     return currentDocId;
                 }

-                bytesValues.setDocument(currentDocId);
+                final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
-                long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
+                if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
-                if (index != -1) {
+                    continue;
-                    currentScore = scores.get(index);
+                }
-                    currentScore /= occurrences.get(index);
+
-                    remaining--;
+                final long parentIdx = parentIds.find(globalOrdinal);
+                if (parentIdx != -1) {
+                    currentScore = scores.get(parentIdx);
+                    currentScore /= occurrences.get(parentIdx);
+                    parentWeight.remaining--;
                     return currentDocId;
                 }
             }

@@ -412,7 +516,7 @@ public class ChildrenQuery extends Query {

         @Override
         public int advance(int target) throws IOException {
-            if (remaining == 0) {
+            if (parentWeight.remaining == 0) {
                 return currentDocId = NO_MORE_DOCS;
             }

@@ -421,12 +525,16 @@ public class ChildrenQuery extends Query {
                 return currentDocId;
             }

-            bytesValues.setDocument(currentDocId);
+            final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
-            long index = parentIds.find(bytesValues.nextValue(), bytesValues.currentValueHash());
+            if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
-            if (index != -1) {
+                return nextDoc();
-                currentScore = scores.get(index);
+            }
-                currentScore /= occurrences.get(index);
+
-                remaining--;
+            final long parentIdx = parentIds.find(globalOrdinal);
+            if (parentIdx != -1) {
+                currentScore = scores.get(parentIdx);
+                currentScore /= occurrences.get(parentIdx);
+                parentWeight.remaining--;
                 return currentDocId;
             } else {
                 return nextDoc();

@@ -435,145 +543,3 @@ public class ChildrenQuery extends Query {
         }

     }

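Both scorers count down parentWeight.remaining and bail out with NO_MORE_DOCS once every collected parent has been emitted, so the parent doc set is never iterated past the last match. A self-contained sketch of that short-circuit, assuming a simple matched[] array in place of the LongHash lookup:

public class RemainingCountdownSketch {
    public static void main(String[] args) {
        int[] parentDocs = {3, 7, 9, 12, 20};              // candidate parent docs in order
        boolean[] matched = {true, false, true, false, false};
        long remaining = 2;                                 // parents the collector found

        for (int i = 0; i < parentDocs.length && remaining > 0; i++) {
            if (matched[i]) {
                remaining--;                                // one fewer match left to find
                System.out.println("emit doc " + parentDocs[i]);
            }
        }
        // Iteration short-circuits here; docs 12 and 20 are never inspected.
    }
}
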
-    private abstract static class ParentIdAndScoreCollector extends NoopCollector {
-
-        final BytesRefHash parentIds;
-        protected final String parentType;
-        private final ParentChildIndexFieldData indexFieldData;
-        protected final BigArrays bigArrays;
-
-        protected FloatArray scores;
-
-        protected BytesValues.WithOrdinals values;
-        protected Ordinals.Docs ordinals;
-        protected Scorer scorer;
-
-        // This remembers what ordinals have already been seen in the current segment
-        // and prevents from fetch the actual id from FD and checking if it exists in parentIds
-        protected LongArray parentIdsIndex;
-
-        private ParentIdAndScoreCollector(ParentChildIndexFieldData indexFieldData, String parentType, SearchContext searchContext) {
-            this.parentType = parentType;
-            this.indexFieldData = indexFieldData;
-            this.bigArrays = searchContext.bigArrays();
-            this.parentIds = new BytesRefHash(512, bigArrays);
-            this.scores = bigArrays.newFloatArray(512, false);
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            if (values != null) {
-                long ord = ordinals.getOrd(doc);
-                long parentIdx = parentIdsIndex.get(ord);
-                if (parentIdx < 0) {
-                    final BytesRef bytes = values.getValueByOrd(ord);
-                    final int hash = values.currentValueHash();
-                    parentIdx = parentIds.add(bytes, hash);
-                    if (parentIdx < 0) {
-                        parentIdx = -parentIdx - 1;
-                        doScore(parentIdx);
-                    } else {
-                        scores = bigArrays.grow(scores, parentIdx + 1);
-                        scores.set(parentIdx, scorer.score());
-                    }
-                    parentIdsIndex.set(ord, parentIdx);
-                } else {
-                    doScore(parentIdx);
-                }
-            }
-        }
-
-        protected void doScore(long index) throws IOException {
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getBytesValues(parentType);
-            if (values != null) {
-                ordinals = values.ordinals();
-                final long maxOrd = ordinals.getMaxOrd();
-                if (parentIdsIndex == null) {
-                    parentIdsIndex = bigArrays.newLongArray(BigArrays.overSize(maxOrd), false);
-                } else if (parentIdsIndex.size() < maxOrd) {
-                    parentIdsIndex = bigArrays.grow(parentIdsIndex, maxOrd);
-                }
-                parentIdsIndex.fill(0, maxOrd, -1L);
-            }
-        }
-
-        @Override
-        public void setScorer(Scorer scorer) throws IOException {
-            this.scorer = scorer;
-        }
-
-    }
-
-    private final static class SumCollector extends ParentIdAndScoreCollector {
-
-        private SumCollector(ParentChildIndexFieldData indexFieldData, String parentType, SearchContext searchContext) {
-            super(indexFieldData, parentType, searchContext);
-        }
-
-        @Override
-        protected void doScore(long index) throws IOException {
-            scores.increment(index, scorer.score());
-        }
-    }
-
-    private final static class MaxCollector extends ParentIdAndScoreCollector {
-
-        private MaxCollector(ParentChildIndexFieldData indexFieldData, String childType, SearchContext searchContext) {
-            super(indexFieldData, childType, searchContext);
-        }
-
-        @Override
-        protected void doScore(long index) throws IOException {
-            float currentScore = scorer.score();
-            if (currentScore > scores.get(index)) {
-                scores.set(index, currentScore);
-            }
-        }
-    }
-
-    private final static class AvgCollector extends ParentIdAndScoreCollector {
-
-        private IntArray occurrences;
-
-        AvgCollector(ParentChildIndexFieldData indexFieldData, String childType, SearchContext searchContext) {
-            super(indexFieldData, childType, searchContext);
-            this.occurrences = bigArrays.newIntArray(512, false);
-        }
-
-        @Override
-        public void collect(int doc) throws IOException {
-            if (values != null) {
-                int ord = (int) ordinals.getOrd(doc);
-                long parentIdx = parentIdsIndex.get(ord);
-                if (parentIdx < 0) {
-                    final BytesRef bytes = values.getValueByOrd(ord);
-                    final int hash = values.currentValueHash();
-                    parentIdx = parentIds.add(bytes, hash);
-                    if (parentIdx < 0) {
-                        parentIdx = -parentIdx - 1;
-                        scores.increment(parentIdx, scorer.score());
-                        occurrences.increment(parentIdx, 1);
-                    } else {
-                        scores = bigArrays.grow(scores, parentIdx + 1);
-                        scores.set(parentIdx, scorer.score());
-                        occurrences = bigArrays.grow(occurrences, parentIdx + 1);
-                        occurrences.set(parentIdx, 1);
-                    }
-                    parentIdsIndex.set(ord, parentIdx);
-                } else {
-                    scores.increment(parentIdx, scorer.score());
-                    occurrences.increment(parentIdx, 1);
-                }
-            }
-        }
-
-    }
-
-}

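Everything above replaces per-segment parent-id bytes with global ordinals: a single long per term that is stable across all segments of the reader, so cross-segment joins become integer comparisons instead of BytesRef lookups and hashing. A toy illustration of how a segment-ordinal-to-global-ordinal mapping can be derived from sorted per-segment term dictionaries (the real OrdinalMappingSource is built far more efficiently, but the shape of the mapping is the same):

import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;

public class GlobalOrdinalsSketch {
    public static void main(String[] args) {
        // Per-segment sorted term dictionaries (parent ids), purely illustrative.
        String[][] segments = {{"p1", "p3"}, {"p2", "p3", "p4"}};

        // Global term dictionary: the sorted union of all segment terms.
        TreeSet<String> global = new TreeSet<>();
        for (String[] seg : segments) for (String t : seg) global.add(t);
        List<String> globalTerms = new ArrayList<>(global);

        // segmentOrd -> globalOrd mapping per segment, i.e. the structure the
        // global ordinals field data hands out per reader context.
        for (int s = 0; s < segments.length; s++) {
            for (int ord = 0; ord < segments[s].length; ord++) {
                int globalOrd = globalTerms.indexOf(segments[s][ord]);
                System.out.println("segment " + s + " ord " + ord + " -> global ord " + globalOrd);
            }
        }
        // "p3" maps to the same global ordinal from both segments, so a parent
        // matched in one segment is recognized in another without comparing bytes.
    }
}
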
@@ -23,22 +23,19 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LongBitSet;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
 import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.util.BytesRefHash;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
-import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
+import java.util.List;
 import java.util.Set;

 /**
@@ -88,174 +85,36 @@ public class ParentConstantScoreQuery extends Query {

     @Override
     public Weight createWeight(IndexSearcher searcher) throws IOException {
-        final SearchContext searchContext = SearchContext.current();
+        IndexFieldData.WithOrdinals globalIfd = parentChildIndexFieldData.getGlobalParentChild(parentType, searcher.getIndexReader());
-        final BytesRefHash parentIds = new BytesRefHash(512, searchContext.bigArrays());
-        boolean releaseParentIds = true;
-        try {
-            ParentIdsCollector collector = new ParentIdsCollector(parentType, parentChildIndexFieldData, parentIds);
         assert rewrittenParentQuery != null;
         assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();

+        final long maxOrd;
+        List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
+        if (globalIfd == null || leaves.isEmpty()) {
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        } else {
+            AtomicFieldData.WithOrdinals afd = globalIfd.load(leaves.get(0));
+            BytesValues.WithOrdinals globalValues = afd.getBytesValues(false);
+            Ordinals.Docs globalOrdinals = globalValues.ordinals();
+            maxOrd = globalOrdinals.getMaxOrd();
+        }
+
+        if (maxOrd == 0) {
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        }
+
         final Query parentQuery = rewrittenParentQuery;
+        ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd);
         IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
         indexSearcher.setSimilarity(searcher.getSimilarity());
         indexSearcher.search(parentQuery, collector);

-        if (parentIds.size() == 0) {
+        if (collector.parentCount() == 0) {
             return Queries.newMatchNoDocsQuery().createWeight(searcher);
         }

-        final ChildrenWeight childrenWeight = new ChildrenWeight(childrenFilter, parentIds);
+        return new ChildrenWeight(childrenFilter, collector, globalIfd);
-        searchContext.addReleasable(childrenWeight, Lifetime.COLLECTION);
-        releaseParentIds = false;
-        return childrenWeight;
-        } finally {
-            if (releaseParentIds) {
-                Releasables.close(parentIds);
-            }
-        }
-    }
-
-    private final class ChildrenWeight extends Weight implements Releasable {
-
-        private final Filter childrenFilter;
-        private final BytesRefHash parentIds;
-
-        private float queryNorm;
-        private float queryWeight;
-
-        private FixedBitSet seenOrdinalsCache;
-        private FixedBitSet seenMatchedOrdinalsCache;
-
-        private ChildrenWeight(Filter childrenFilter, BytesRefHash parentIds) {
-            this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
-            this.parentIds = parentIds;
-        }
-
-        @Override
-        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
-            return new Explanation(getBoost(), "not implemented yet...");
-        }
-
-        @Override
-        public Query getQuery() {
-            return ParentConstantScoreQuery.this;
-        }
-
-        @Override
-        public float getValueForNormalization() throws IOException {
-            queryWeight = getBoost();
-            return queryWeight * queryWeight;
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-            this.queryNorm = norm * topLevelBoost;
-            queryWeight *= this.queryNorm;
-        }
-
-        @Override
-        public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
-            DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
-            if (DocIdSets.isEmpty(childrenDocIdSet)) {
-                return null;
-            }
-
-            BytesValues.WithOrdinals bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
-            if (bytesValues != null) {
-                DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
-                if (innerIterator != null) {
-                    Ordinals.Docs ordinals = bytesValues.ordinals();
-                    int maxOrd = (int) ordinals.getMaxOrd();
-                    if (seenOrdinalsCache == null || seenOrdinalsCache.length() < maxOrd) {
-                        seenOrdinalsCache = new FixedBitSet(maxOrd);
-                        seenMatchedOrdinalsCache = new FixedBitSet(maxOrd);
-                    } else {
-                        seenOrdinalsCache.clear(0, maxOrd);
-                        seenMatchedOrdinalsCache.clear(0, maxOrd);
-                    }
-                    ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
-                            innerIterator, parentIds, bytesValues, ordinals, seenOrdinalsCache, seenMatchedOrdinalsCache
-                    );
-                    return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
-                }
-            }
-            return null;
-        }
-
-        @Override
-        public void close() throws ElasticsearchException {
-            Releasables.close(parentIds);
-        }
-
-        private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
-
-            private final BytesRefHash parentIds;
-            private final BytesValues.WithOrdinals bytesValues;
-            private final Ordinals.Docs ordinals;
-
-            // This remembers what ordinals have already been emitted in the current segment
-            // and prevents from fetch the actual id from FD and checking if it exists in parentIds
-            private final FixedBitSet seenOrdinals;
-            private final FixedBitSet seenMatchedOrdinals;
-
-            ChildrenDocIdIterator(DocIdSetIterator innerIterator, BytesRefHash parentIds, BytesValues.WithOrdinals bytesValues, Ordinals.Docs ordinals, FixedBitSet seenOrdinals, FixedBitSet seenMatchedOrdinals) {
-                super(innerIterator);
-                this.parentIds = parentIds;
-                this.bytesValues = bytesValues;
-                this.ordinals = ordinals;
-                this.seenOrdinals = seenOrdinals;
-                this.seenMatchedOrdinals = seenMatchedOrdinals;
-            }
-
-            @Override
-            protected boolean match(int doc) {
-                int ord = (int) ordinals.getOrd(doc);
-                if (ord == Ordinals.MISSING_ORDINAL) {
-                    return false;
-                }
-
-                if (!seenOrdinals.get(ord)) {
-                    seenOrdinals.set(ord);
-                    if (parentIds.find(bytesValues.getValueByOrd(ord), bytesValues.currentValueHash()) >= 0) {
-                        seenMatchedOrdinals.set(ord);
-                        return true;
-                    } else {
-                        return false;
-                    }
-                } else {
-                    return seenMatchedOrdinals.get(ord);
-                }
-            }
-
-        }
-    }
-
-    private final static class ParentIdsCollector extends NoopCollector {
-
-        private final BytesRefHash parentIds;
-        private final ParentChildIndexFieldData indexFieldData;
-        private final String parentType;
-
-        private BytesValues values;
-
-        ParentIdsCollector(String parentType, ParentChildIndexFieldData indexFieldData, BytesRefHash parentIds) {
-            this.parentIds = parentIds;
-            this.indexFieldData = indexFieldData;
-            this.parentType = parentType;
-        }
-
-        public void collect(int doc) throws IOException {
-            // It can happen that for a particular segment no document exists for a specific type. This prevents NPE
-            if (values != null) {
-                values.setDocument(doc);
-                parentIds.add(values.nextValue(), values.currentValueHash());
-            }
-        }
-
-        @Override
-        public void setNextReader(AtomicReaderContext readerContext) throws IOException {
-            values = indexFieldData.load(readerContext).getBytesValues(parentType);
-        }
     }

     @Override
@@ -290,9 +149,125 @@ public class ParentConstantScoreQuery extends Query {

     @Override
     public String toString(String field) {
-        StringBuilder sb = new StringBuilder();
+        return "parent_filter[" + parentType + "](" + originalParentQuery + ')';
-        sb.append("parent_filter[").append(parentType).append("](").append(originalParentQuery).append(')');
+    }
-        return sb.toString();
-    }

+    private final class ChildrenWeight extends Weight {
+
+        private final IndexFieldData.WithOrdinals globalIfd;
+        private final Filter childrenFilter;
+        private final LongBitSet parentOrds;
+
+        private float queryNorm;
+        private float queryWeight;
+
+        private ChildrenWeight(Filter childrenFilter, ParentOrdsCollector collector, IndexFieldData.WithOrdinals globalIfd) {
+            this.globalIfd = globalIfd;
+            this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
+            this.parentOrds = collector.parentOrds;
+        }
+
+        @Override
+        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
+            return new Explanation(getBoost(), "not implemented yet...");
+        }
+
+        @Override
+        public Query getQuery() {
+            return ParentConstantScoreQuery.this;
+        }
+
+        @Override
+        public float getValueForNormalization() throws IOException {
+            queryWeight = getBoost();
+            return queryWeight * queryWeight;
+        }
+
+        @Override
+        public void normalize(float norm, float topLevelBoost) {
+            this.queryNorm = norm * topLevelBoost;
+            queryWeight *= this.queryNorm;
+        }
+
+        @Override
+        public Scorer scorer(AtomicReaderContext context, Bits acceptDocs) throws IOException {
+            DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
+            if (DocIdSets.isEmpty(childrenDocIdSet)) {
+                return null;
+            }
+
+            BytesValues.WithOrdinals globalValues = globalIfd.load(context).getBytesValues(false);
+            if (globalValues != null) {
+                DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
+                if (innerIterator != null) {
+                    Ordinals.Docs globalOrdinals = globalValues.ordinals();
+                    ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(
+                            innerIterator, parentOrds, globalOrdinals
+                    );
+                    return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
+                }
+            }
+            return null;
+        }
+
+    }
+
+    private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
+
+        private final LongBitSet parentOrds;
+        private final Ordinals.Docs globalOrdinals;
+
+        ChildrenDocIdIterator(DocIdSetIterator innerIterator, LongBitSet parentOrds, Ordinals.Docs globalOrdinals) {
+            super(innerIterator);
+            this.parentOrds = parentOrds;
+            this.globalOrdinals = globalOrdinals;
+        }
+
+        @Override
+        protected boolean match(int docId) {
+            int globalOrd = (int) globalOrdinals.getOrd(docId);
+            if (globalOrd != Ordinals.MISSING_ORDINAL) {
+                return parentOrds.get(globalOrd);
+            } else {
+                return false;
+            }
+        }
+
+    }
+
+    private final static class ParentOrdsCollector extends NoopCollector {
+
+        private final LongBitSet parentOrds;
+        private final IndexFieldData.WithOrdinals globalIfd;
+
+        private Ordinals.Docs globalOrdinals;
+
+        ParentOrdsCollector(IndexFieldData.WithOrdinals globalIfd, long maxOrd) {
+            this.parentOrds = new LongBitSet(maxOrd);
+            this.globalIfd = globalIfd;
+        }
+
+        public void collect(int doc) throws IOException {
+            // It can happen that for a particular segment no document exists for a specific type. This prevents NPE
+            if (globalOrdinals != null) {
+                long globalOrd = globalOrdinals.getOrd(doc);
+                if (globalOrd != Ordinals.MISSING_ORDINAL) {
+                    parentOrds.set(globalOrd);
+                }
+            }
+        }
+
+        @Override
+        public void setNextReader(AtomicReaderContext readerContext) throws IOException {
+            BytesValues.WithOrdinals values = globalIfd.load(readerContext).getBytesValues(false);
+            if (values != null) {
+                globalOrdinals = values.ordinals();
+            }
+        }
+
+        public long parentCount() {
+            return parentOrds.cardinality();
+        }
     }

 }

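ParentConstantScoreQuery needs no scores at all, so matching parents are recorded as bits in a LongBitSet indexed by global ordinal and each child doc is accepted with a single get(). A sketch of that membership test, using java.util.BitSet as a stand-in for Lucene's LongBitSet:

import java.util.BitSet;

public class OrdBitSetFilterSketch {
    public static void main(String[] args) {
        BitSet parentOrds = new BitSet(16);  // stand-in for LongBitSet(maxOrd)

        // Collection phase: the parent query marks each matching parent's global ord.
        parentOrds.set(2);
        parentOrds.set(5);

        // Filter phase: each child resolves its parent's global ord and matches
        // iff that bit is set; -1 plays the role of the missing ordinal here.
        long[] childParentOrds = {2, 7, 5, -1};
        for (long ord : childParentOrds) {
            boolean match = ord != -1 && parentOrds.get((int) ord);
            System.out.println("child with parent ord " + ord + " matches: " + match);
        }
    }
}
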
@@ -18,36 +18,106 @@
  */
 package org.elasticsearch.index.search.child;

-import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.*;
-import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.queries.TermFilter;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.*;
-import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.lease.Releasables;
-import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.search.AndFilter;
+import org.elasticsearch.common.util.LongHash;
+import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.common.util.BytesRefHash;
+import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;

 /**
  * Advantages of using this filter over Lucene's TermsFilter in the parent child context:
  * 1) Don't need to copy all values over to a list from the id cache and then
  * copy all the ids values over to one continuous byte array. Should save a lot of object creations and gcs..
  * 2) We filter docs by one field only.
- * 3) We can directly reference to values that originate from the id cache.
  */
 final class ParentIdsFilter extends Filter {

+    static Filter createShortCircuitFilter(Filter nonNestedDocsFilter, SearchContext searchContext,
+                                           String parentType, BytesValues.WithOrdinals globalValues,
+                                           LongBitSet parentOrds, long numFoundParents) {
+        if (numFoundParents == 1) {
+            globalValues.getValueByOrd(parentOrds.nextSetBit(0));
+            BytesRef id = globalValues.copyShared();
+            if (nonNestedDocsFilter != null) {
+                List<Filter> filters = Arrays.asList(
+                        new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
+                        nonNestedDocsFilter
+                );
+                return new AndFilter(filters);
+            } else {
+                return new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
+            }
+        } else {
+            BytesRefHash parentIds = null;
+            boolean constructed = false;
+            try {
+                parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
+                for (long parentOrd = parentOrds.nextSetBit(0L); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
+                    parentIds.add(globalValues.getValueByOrd(parentOrd));
+                }
+                constructed = true;
+            } finally {
+                if (!constructed) {
+                    Releasables.close(parentIds);
+                }
+            }
+            searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
+            return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
+        }
+    }
+
+    static Filter createShortCircuitFilter(Filter nonNestedDocsFilter, SearchContext searchContext,
+                                           String parentType, BytesValues.WithOrdinals globalValues,
+                                           LongHash parentIdxs, long numFoundParents) {
+        if (numFoundParents == 1) {
+            globalValues.getValueByOrd(parentIdxs.get(0));
+            BytesRef id = globalValues.copyShared();
+            if (nonNestedDocsFilter != null) {
+                List<Filter> filters = Arrays.asList(
+                        new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
+                        nonNestedDocsFilter
+                );
+                return new AndFilter(filters);
+            } else {
+                return new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
+            }
+        } else {
+            BytesRefHash parentIds = null;
+            boolean constructed = false;
+            try {
+                parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
+                for (int id = 0; id < parentIdxs.size(); id++) {
+                    parentIds.add(globalValues.getValueByOrd(parentIdxs.get(id)));
+                }
+                constructed = true;
+            } finally {
+                if (!constructed) {
+                    Releasables.close(parentIds);
+                }
+            }
+            searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
+            return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
+        }
+    }

     private final BytesRef parentTypeBr;
     private final Filter nonNestedDocsFilter;
     private final BytesRefHash parentIds;

-    ParentIdsFilter(String parentType, Filter nonNestedDocsFilter, BytesRefHash parentIds) {
+    private ParentIdsFilter(String parentType, Filter nonNestedDocsFilter, BytesRefHash parentIds) {
         this.nonNestedDocsFilter = nonNestedDocsFilter;
         this.parentTypeBr = new BytesRef(parentType);
         this.parentIds = parentIds;

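Both createShortCircuitFilter overloads degenerate to a plain uid term filter when exactly one parent matched, and only rebuild a BytesRefHash of ids when there are several. A sketch of that branch, with a hypothetical uid() helper standing in for Uid.createUidAsBytes (the returned strings merely label the filter that would be built):

import java.util.Set;

public class ShortCircuitFilterSketch {
    // Hypothetical uid encoding shaped like "type#id"; the real Uid class differs.
    static String uid(String type, String id) {
        return type + "#" + id;
    }

    // Returns a label for the filter the real code would construct.
    static String chooseFilter(String parentType, Set<String> parentIds) {
        if (parentIds.size() == 1) {
            // One parent: a single exact term lookup beats hashing every candidate.
            return "TermFilter(" + uid(parentType, parentIds.iterator().next()) + ")";
        }
        // Several parents: fall back to set membership over all collected ids.
        return "ParentIdsFilter(" + parentIds.size() + " ids)";
    }

    public static void main(String[] args) {
        System.out.println(chooseFilter("blog", Set.of("p1")));
        System.out.println(chooseFilter("blog", Set.of("p1", "p2", "p3")));
    }
}
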
@@ -23,7 +23,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.lease.Releasable;
@@ -33,10 +32,10 @@ import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.BytesRefHash;
 import org.elasticsearch.common.util.FloatArray;
-import org.elasticsearch.common.util.LongArray;
+import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
 import org.elasticsearch.search.internal.SearchContext;
@@ -99,11 +98,7 @@ public class ParentQuery extends Query {

     @Override
     public String toString(String field) {
-        StringBuilder sb = new StringBuilder();
+        return "ParentQuery[" + parentType + "](" + originalParentQuery.toString(field) + ')' + ToStringUtils.boost(getBoost());
-        sb.append("ParentQuery[").append(parentType).append("](")
-                .append(originalParentQuery.toString(field)).append(')')
-                .append(ToStringUtils.boost(getBoost()));
-        return sb.toString();
     }

     @Override
@@ -133,62 +128,70 @@ public class ParentQuery extends Query {

     @Override
     public Weight createWeight(IndexSearcher searcher) throws IOException {
-        SearchContext searchContext = SearchContext.current();
+        SearchContext sc = SearchContext.current();
-        final ParentIdAndScoreCollector collector = new ParentIdAndScoreCollector(searchContext, parentChildIndexFieldData, parentType);
         ChildWeight childWeight;
         boolean releaseCollectorResource = true;
+        ParentOrdAndScoreCollector collector = null;
+        IndexFieldData.WithOrdinals globalIfd = parentChildIndexFieldData.getGlobalParentChild(parentType, searcher.getIndexReader());
+        if (globalIfd == null) {
+            // No docs of the specified type exist on this shard
+            return Queries.newMatchNoDocsQuery().createWeight(searcher);
+        }
+
         try {
             assert rewrittenParentQuery != null;
             assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader + " searcher.getIndexReader()=" + searcher.getIndexReader();
             final Query parentQuery = rewrittenParentQuery;
-            IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
+            collector = new ParentOrdAndScoreCollector(sc, globalIfd);
+            IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader());
             indexSearcher.setSimilarity(searcher.getSimilarity());
             indexSearcher.search(parentQuery, collector);
-            FloatArray scores = collector.scores;
+            if (collector.parentCount() == 0) {
-            BytesRefHash parentIds = collector.parentIds;
-            if (parentIds.size() == 0) {
                 return Queries.newMatchNoDocsQuery().createWeight(searcher);
             }
-            childWeight = new ChildWeight(searchContext, parentQuery.createWeight(searcher), childrenFilter, parentIds, scores);
+            childWeight = new ChildWeight(parentQuery.createWeight(searcher), childrenFilter, collector, globalIfd);
             releaseCollectorResource = false;
         } finally {
             if (releaseCollectorResource) {
                 // either if we run into an exception or if we return early
-                Releasables.close(collector.parentIds, collector.scores);
+                Releasables.close(collector);
             }
         }
-        searchContext.addReleasable(childWeight, Lifetime.COLLECTION);
+        sc.addReleasable(collector, Lifetime.COLLECTION);
         return childWeight;
     }

-    private static class ParentIdAndScoreCollector extends NoopCollector {
+    private static class ParentOrdAndScoreCollector extends NoopCollector implements Releasable {

-        private final BytesRefHash parentIds;
+        private final LongHash parentIdxs;
         private FloatArray scores;
-        private final ParentChildIndexFieldData indexFieldData;
+        private final IndexFieldData.WithOrdinals globalIfd;
-        private final String parentType;
         private final BigArrays bigArrays;

         private Scorer scorer;
-        private BytesValues values;
+        private BytesValues.WithOrdinals values;
+        private Ordinals.Docs globalOrdinals;

-        ParentIdAndScoreCollector(SearchContext searchContext, ParentChildIndexFieldData indexFieldData, String parentType) {
+        ParentOrdAndScoreCollector(SearchContext searchContext, IndexFieldData.WithOrdinals globalIfd) {
             this.bigArrays = searchContext.bigArrays();
-            this.parentIds = new BytesRefHash(512, bigArrays);
+            this.parentIdxs = new LongHash(512, bigArrays);
             this.scores = bigArrays.newFloatArray(512, false);
-            this.indexFieldData = indexFieldData;
+            this.globalIfd = globalIfd;
-            this.parentType = parentType;
         }

         @Override
         public void collect(int doc) throws IOException {
             // It can happen that for a particular segment no document exists for a specific type. This prevents NPE
-            if (values != null) {
+            if (globalOrdinals != null) {
-                values.setDocument(doc);
+                long globalOrdinal = globalOrdinals.getOrd(doc);
-                long index = parentIds.add(values.nextValue(), values.currentValueHash());
+                if (globalOrdinal != Ordinals.MISSING_ORDINAL) {
-                if (index >= 0) {
+                    long parentIdx = parentIdxs.add(globalOrdinal);
-                    scores = bigArrays.grow(scores, index + 1);
+                    if (parentIdx >= 0) {
-                    scores.set(index, scorer.score());
+                        scores = bigArrays.grow(scores, parentIdx + 1);
+                        scores.set(parentIdx, scorer.score());
+                    } else {
+                        assert false : "parent id should only match once, since there can only be one parent doc";
+                    }
                 }
             }
         }
@@ -200,27 +203,37 @@ public class ParentQuery extends Query {

         @Override
         public void setNextReader(AtomicReaderContext context) throws IOException {
-            values = indexFieldData.load(context).getBytesValues(parentType);
+            values = globalIfd.load(context).getBytesValues(false);
+            if (values != null) {
+                globalOrdinals = values.ordinals();
+            }
         }

-    private class ChildWeight extends Weight implements Releasable {
+        @Override
+        public void close() throws ElasticsearchException {
+            Releasables.close(parentIdxs, scores);
+        }
+
+        public long parentCount() {
+            return parentIdxs.size();
+        }
+
+    }
+
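ParentQuery's collector can assert that a global ordinal is added at most once, because a parent uid occurs in exactly one live parent document; ChildScorer below then resolves a child's parent ordinal back to the stored score, treating a failed lookup as a non-match. A sketch of that ord-to-score lookup, with a HashMap in place of the real LongHash/FloatArray pair:

import java.util.HashMap;
import java.util.Map;

public class ParentScoreLookupSketch {
    public static void main(String[] args) {
        // Collection phase: parent global ord -> score of the parent query.
        Map<Long, Float> scoreByOrd = new HashMap<>();
        scoreByOrd.put(3L, 1.5f);
        scoreByOrd.put(8L, 0.7f);

        // Scoring phase: each child doc looks up its parent's ord; a miss
        // (find() == -1 in the real LongHash) means the parent didn't match.
        long[] childOrds = {8, 4, 3};
        for (long ord : childOrds) {
            Float score = scoreByOrd.get(ord);
            if (score != null) {
                System.out.println("child inherits score " + score);
            } else {
                System.out.println("skip: parent ord " + ord + " not collected");
            }
        }
    }
}
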
||||||
|
private class ChildWeight extends Weight {
|
||||||
|
|
||||||
private final SearchContext searchContext;
|
|
||||||
private final Weight parentWeight;
|
private final Weight parentWeight;
|
||||||
private final Filter childrenFilter;
|
private final Filter childrenFilter;
|
||||||
private final BytesRefHash parentIds;
|
private final LongHash parentIdxs;
|
||||||
private final FloatArray scores;
|
private final FloatArray scores;
|
||||||
|
private final IndexFieldData.WithOrdinals globalIfd;
|
||||||
|
|
||||||
private FixedBitSet seenOrdinalsCache;
|
private ChildWeight(Weight parentWeight, Filter childrenFilter, ParentOrdAndScoreCollector collector, IndexFieldData.WithOrdinals globalIfd) {
|
||||||
private LongArray parentIdsIndexCache;
|
|
||||||
|
|
||||||
private ChildWeight(SearchContext searchContext, Weight parentWeight, Filter childrenFilter, BytesRefHash parentIds, FloatArray scores) {
|
|
||||||
this.searchContext = searchContext;
|
|
||||||
this.parentWeight = parentWeight;
|
this.parentWeight = parentWeight;
|
||||||
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
|
this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
|
||||||
this.parentIds = parentIds;
|
this.parentIdxs = collector.parentIdxs;
|
||||||
this.scores = scores;
|
this.scores = collector.scores;
|
||||||
|
this.globalIfd = globalIfd;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -250,60 +263,33 @@ public class ParentQuery extends Query {
|
||||||
if (DocIdSets.isEmpty(childrenDocSet)) {
|
if (DocIdSets.isEmpty(childrenDocSet)) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
BytesValues.WithOrdinals bytesValues = parentChildIndexFieldData.load(context).getBytesValues(parentType);
|
BytesValues.WithOrdinals bytesValues = globalIfd.load(context).getBytesValues(false);
|
||||||
if (bytesValues == null) {
|
if (bytesValues == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ordinals.Docs ordinals = bytesValues.ordinals();
|
Ordinals.Docs ordinals = bytesValues.ordinals();
|
||||||
final int maxOrd = (int) ordinals.getMaxOrd();
|
return new ChildScorer(this, parentIdxs, scores, childrenDocSet.iterator(), ordinals);
|
||||||
final BigArrays bigArrays = searchContext.bigArrays();
|
|
||||||
if (parentIdsIndexCache == null) {
|
|
||||||
parentIdsIndexCache = bigArrays.newLongArray(BigArrays.overSize(maxOrd), false);
|
|
||||||
} else if (parentIdsIndexCache.size() < maxOrd) {
|
|
||||||
parentIdsIndexCache = bigArrays.grow(parentIdsIndexCache, maxOrd);
|
|
||||||
}
|
|
||||||
parentIdsIndexCache.fill(0, maxOrd, -1L);
|
|
||||||
if (seenOrdinalsCache == null || seenOrdinalsCache.length() < maxOrd) {
|
|
||||||
seenOrdinalsCache = new FixedBitSet(maxOrd);
|
|
||||||
} else {
|
|
||||||
seenOrdinalsCache.clear(0, maxOrd);
|
|
||||||
}
|
|
||||||
return new ChildScorer(this, parentIds, scores, childrenDocSet.iterator(), bytesValues, ordinals, seenOrdinalsCache, parentIdsIndexCache);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public void close() throws ElasticsearchException {
|
|
||||||
Releasables.close(parentIds, scores, parentIdsIndexCache);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
     private static class ChildScorer extends Scorer {
 
-        private final BytesRefHash parentIds;
+        private final LongHash parentIdxs;
         private final FloatArray scores;
         private final DocIdSetIterator childrenIterator;
-        private final BytesValues.WithOrdinals bytesValues;
         private final Ordinals.Docs ordinals;
 
-        // This remembers what ordinals have already been seen in the current segment
-        // and prevents from fetch the actual id from FD and checking if it exists in parentIds
-        private final FixedBitSet seenOrdinals;
-        private final LongArray parentIdsIndex;
-
         private int currentChildDoc = -1;
         private float currentScore;
 
-        ChildScorer(Weight weight, BytesRefHash parentIds, FloatArray scores, DocIdSetIterator childrenIterator,
-                    BytesValues.WithOrdinals bytesValues, Ordinals.Docs ordinals, FixedBitSet seenOrdinals, LongArray parentIdsIndex) {
+        ChildScorer(Weight weight, LongHash parentIdxs, FloatArray scores, DocIdSetIterator childrenIterator, Ordinals.Docs ordinals) {
             super(weight);
-            this.parentIds = parentIds;
+            this.parentIdxs = parentIdxs;
             this.scores = scores;
             this.childrenIterator = childrenIterator;
-            this.bytesValues = bytesValues;
             this.ordinals = ordinals;
-            this.seenOrdinals = seenOrdinals;
-            this.parentIdsIndex = parentIdsIndex;
         }
 
         @Override
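The hunk above is the heart of the cut-over on the read side: scorer() no longer sizes, fills and clears a per-segment LongArray index cache and FixedBitSet, the close() override that released them disappears, and ChildScorer sheds its bytesValues, seenOrdinals and parentIdsIndex fields. The rewritten constructor signature that supplies collector and globalIfd falls outside this excerpt, so it is not shown. As a rough intuition for why keying matched parents by global ordinal pays off, here is a minimal, self-contained sketch in plain JDK types; ParentLookupSketch and both method names are hypothetical stand-ins, not Elasticsearch's BytesRefHash/LongHash:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Simplified illustration of the lookup strategy this commit swaps in.
public class ParentLookupSketch {

    // Before: matching parents keyed by their id bytes, so every candidate
    // child doc means fetching its parent id value and hashing variable-length bytes.
    static boolean bytesBased(Set<String> matchingParentIds, String parentIdOfChild) {
        return matchingParentIds.contains(parentIdOfChild);
    }

    // After: matching parents keyed by global ordinal, so a match is a single
    // primitive probe with no per-segment id materialization.
    static boolean ordinalBased(Map<Long, Float> scoreByGlobalOrd, long globalOrdOfChild) {
        return scoreByGlobalOrd.containsKey(globalOrdOfChild);
    }

    public static void main(String[] args) {
        Set<String> ids = new HashSet<>();
        ids.add("parent-1");
        Map<Long, Float> ords = new HashMap<>();
        ords.put(42L, 1.5f);
        System.out.println(bytesBased(ids, "parent-1")); // true
        System.out.println(ordinalBased(ords, 42L));     // true
    }
}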
@@ -331,28 +317,18 @@ public class ParentQuery extends Query {
                     return currentChildDoc;
                 }
 
-                int ord = (int) ordinals.getOrd(currentChildDoc);
-                if (ord == Ordinals.MISSING_ORDINAL) {
+                int globalOrdinal = (int) ordinals.getOrd(currentChildDoc);
+                if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
                     continue;
                 }
 
-                if (!seenOrdinals.get(ord)) {
-                    seenOrdinals.set(ord);
-                    long parentIdx = parentIds.find(bytesValues.getValueByOrd(ord), bytesValues.currentValueHash());
-                    if (parentIdx != -1) {
-                        currentScore = scores.get(parentIdx);
-                        parentIdsIndex.set(ord, parentIdx);
-                        return currentChildDoc;
-                    }
-                } else {
-                    long parentIdx = parentIdsIndex.get(ord);
+                final long parentIdx = parentIdxs.find(globalOrdinal);
                 if (parentIdx != -1) {
                     currentScore = scores.get(parentIdx);
                     return currentChildDoc;
                 }
             }
         }
-        }
 
         @Override
         public int advance(int target) throws IOException {
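nextDoc() above and advance() below collapse to the same shape: resolve the child's global ordinal, probe parentIdxs once via LongHash#find, and either score the doc or skip it. The old two-branch dance (bitset check, BytesRef fetch and hash, side-cache write-back) existed only because segment ordinals were not comparable across readers. A hedged sketch of the new shape; the Segment interface and every helper name are hypothetical stand-ins for Lucene's DocIdSetIterator plus Elasticsearch's Ordinals.Docs and LongHash, not real APIs:

// Sketch of the simplified matching loop under the assumptions stated above.
final class MatchLoopSketch {
    // Placeholder value; the real code compares against Ordinals.MISSING_ORDINAL.
    static final long MISSING_ORDINAL = 0;

    interface Segment {
        int nextChildDoc();            // next candidate child doc, or -1 when exhausted
        long globalOrdOf(int doc);     // global ordinal of the doc's parent id
        long findParentSlot(long ord); // slot of a collected parent, or -1 (LongHash#find)
        float scoreOf(long slot);      // score recorded for that parent
    }

    // One hash probe per child doc: no seen-ordinals bitset, no ord -> slot cache.
    static int nextMatch(Segment seg, float[] scoreOut) {
        for (int doc = seg.nextChildDoc(); doc != -1; doc = seg.nextChildDoc()) {
            long globalOrdinal = seg.globalOrdOf(doc);
            if (globalOrdinal == MISSING_ORDINAL) {
                continue; // child carries no parent id value
            }
            long parentIdx = seg.findParentSlot(globalOrdinal);
            if (parentIdx != -1) {
                scoreOut[0] = seg.scoreOf(parentIdx);
                return doc; // matched a collected parent
            }
        }
        return -1; // NO_MORE_DOCS in the real scorer
    }
}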
@@ -361,23 +337,12 @@ public class ParentQuery extends Query {
                 return currentChildDoc;
             }
 
-            int ord = (int) ordinals.getOrd(currentChildDoc);
-            if (ord == Ordinals.MISSING_ORDINAL) {
+            int globalOrdinal = (int) ordinals.getOrd(currentChildDoc);
+            if (globalOrdinal == Ordinals.MISSING_ORDINAL) {
                 return nextDoc();
             }
 
-            if (!seenOrdinals.get(ord)) {
-                seenOrdinals.set(ord);
-                long parentIdx = parentIds.find(bytesValues.getValueByOrd(ord), bytesValues.currentValueHash());
-                if (parentIdx != -1) {
-                    currentScore = scores.get(parentIdx);
-                    parentIdsIndex.set(ord, parentIdx);
-                    return currentChildDoc;
-                } else {
-                    return nextDoc();
-                }
-            } else {
-                long parentIdx = parentIdsIndex.get(ord);
+            final long parentIdx = parentIdxs.find(globalOrdinal);
             if (parentIdx != -1) {
                 currentScore = scores.get(parentIdx);
                 return currentChildDoc;
@@ -385,7 +350,6 @@ public class ParentQuery extends Query {
                 return nextDoc();
             }
         }
-        }
 
         @Override
         public long cost() {
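The parentIdxs/scores pair consumed by the scorer is filled during the parent-collection phase, which this excerpt does not show. A hedged sketch of that phase's contract, with a plain JDK map standing in for the real LongHash + FloatArray pair and a hypothetical class name:

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in for the collector that records each matching parent's
// global ordinal and score; not the actual Elasticsearch collector class.
final class ParentOrdCollectorSketch {
    final Map<Long, Float> scoreByGlobalOrd = new HashMap<>();

    // Called once per matching parent doc. Parent ids are unique, so each
    // global ordinal is recorded at most once, loosely mirroring LongHash#add.
    void collect(long globalOrdinal, float score) {
        scoreByGlobalOrd.putIfAbsent(globalOrdinal, score);
    }
}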
@@ -120,6 +120,9 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 
         IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
         IndexSearcher searcher = new IndexSearcher(indexReader);
+        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(
+                SearchContext.current(), new Engine.SimpleSearcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
+        ));
 
         TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
         TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
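The only test-side wiring change above: a ContextIndexSearcher is installed on the current SearchContext before the query runs, plausibly because loading global ordinals goes through the search context's field data rather than a bare IndexSearcher; the three added lines are the entire change.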
@@ -1388,7 +1388,7 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
         client().prepareIndex("grandissue", "child_type_two", "4").setParent("2").setRouting("1")
                 .setSource("name", "Kate")
                 .get();
-        client().admin().indices().prepareRefresh("grandissue").get();
+        refresh();
 
         SearchResponse searchResponse = client().prepareSearch("grandissue").setQuery(
                 boolQuery().must(