lucene 4: upgraded o.e.index.search.child package
parent 71c3bd7c64
commit 19ab1d0548
org.elasticsearch.common.lucene.search.EmptyScorer

@@ -20,7 +20,7 @@
 package org.elasticsearch.common.lucene.search;

 import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Similarity;
+import org.apache.lucene.search.Weight;

 import java.io.IOException;

@@ -29,8 +29,8 @@ import java.io.IOException;
  */
 public class EmptyScorer extends Scorer {

-    public EmptyScorer(Similarity similarity) {
-        super(similarity);
+    public EmptyScorer(Weight weight) {
+        super(weight);
     }

     @Override
@@ -38,6 +38,11 @@ public class EmptyScorer extends Scorer {
         return 0;
     }

+    @Override
+    public float freq() throws IOException {
+        return 0;
+    }
+
     @Override
     public int docID() {
         return NO_MORE_DOCS;
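In Lucene 4 a Scorer is constructed from the Weight that created it rather than from a Similarity, and freq() becomes part of the scorer contract. The following sketch is illustrative only and is not part of this commit; the class and field names are hypothetical, and the signatures follow the snapshot used above (freq() returning float).

import java.io.IOException;

import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

// Hypothetical example: a scorer that matches exactly one document.
public class SingleDocScorer extends Scorer {

    private final int doc;           // the single matching document (assumption for the example)
    private final float score;
    private int current = -1;

    public SingleDocScorer(Weight weight, int doc, float score) {
        super(weight);               // Lucene 4: the scorer is tied to its Weight
        this.doc = doc;
        this.score = score;
    }

    @Override
    public float score() throws IOException {
        return score;
    }

    @Override
    public float freq() throws IOException {
        return 1;                    // one match per document in this toy example
    }

    @Override
    public int docID() {
        return current;
    }

    @Override
    public int nextDoc() throws IOException {
        return current = (current == -1 ? doc : NO_MORE_DOCS);
    }

    @Override
    public int advance(int target) throws IOException {
        return current = (current == -1 && target <= doc ? doc : NO_MORE_DOCS);
    }
}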
org.elasticsearch.common.lucene.search.NoopCollector

@@ -19,6 +19,7 @@

 package org.elasticsearch.common.lucene.search;

+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Scorer;
@@ -41,7 +42,8 @@ public class NoopCollector extends Collector {
     }

     @Override
-    public void setNextReader(IndexReader reader, int docBase) throws IOException {
+    public void setNextReader(AtomicReaderContext context) throws IOException {
+        throw new UnsupportedOperationException();
     }

     @Override
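The Collector change above is the general Lucene 4 pattern: setNextReader() now receives an AtomicReaderContext instead of an (IndexReader, docBase) pair. As a non-authoritative sketch (not part of this commit, names hypothetical), a minimal hit-counting collector against that API could look like this; the old docBase is still available as context.docBase.

import java.io.IOException;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;

// Illustrative only: counts hits across all segments.
public class CountingCollector extends Collector {

    private int count;

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        // scores are not needed for counting
    }

    @Override
    public void collect(int doc) throws IOException {
        count++;                     // doc is relative to the current segment
    }

    @Override
    public void setNextReader(AtomicReaderContext context) throws IOException {
        // per-segment state would be set up here; context.reader() is the
        // AtomicReader and context.docBase replaces the old docBase argument
    }

    @Override
    public boolean acceptsDocsOutOfOrder() {
        return true;
    }

    public int count() {
        return count;
    }
}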
org.elasticsearch.index.cache.id.IdCache

@@ -19,6 +19,7 @@

 package org.elasticsearch.index.cache.id;

+import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.elasticsearch.common.component.CloseableComponent;
@@ -37,7 +38,7 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable<Id

     void refresh(List<AtomicReaderContext> readers) throws Exception;

-    IdReaderCache reader(IndexReader reader);
+    IdReaderCache reader(AtomicReader reader);

     long sizeInBytes();

SimpleIdCache

@@ -24,7 +24,6 @@ import org.apache.lucene.index.*;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.inject.Inject;
@@ -35,7 +34,6 @@ import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.cache.id.IdCache;
 import org.elasticsearch.index.cache.id.IdReaderCache;
-import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.settings.IndexSettings;
@@ -77,7 +75,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
     }

     @Override
-    public IdReaderCache reader(IndexReader reader) {
+    public IdReaderCache reader(AtomicReader reader) {
         return idReaders.get(reader.getCoreCacheKey());
     }

@@ -98,7 +96,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
         }

         // do the refresh
-        Map<Object, Map<BytesReference, TypeBuilder>> builders = new HashMap<Object, Map<BytesReference, TypeBuilder>>();
+        Map<Object, Map<String, TypeBuilder>> builders = new HashMap<Object, Map<String, TypeBuilder>>();

         // first, go over and load all the id->doc map for all types
         for (AtomicReaderContext context : atomicReaderContexts) {
@@ -111,7 +109,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
             if (reader instanceof SegmentReader) {
                 ((SegmentReader) reader).addCoreClosedListener(this);
             }
-            Map<BytesReference, TypeBuilder> readerBuilder = new HashMap<BytesReference, TypeBuilder>();
+            Map<String, TypeBuilder> readerBuilder = new HashMap<String, TypeBuilder>();
             builders.put(reader.getCoreCacheKey(), readerBuilder);


@@ -124,10 +122,10 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                 DocsEnum docsEnum = null;
                 for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.term()) {
                     HashedBytesArray[] typeAndId = splitUidIntoTypeAndId(term);
-                    TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0]);
+                    TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0].toUtf8());
                     if (typeBuilder == null) {
                         typeBuilder = new TypeBuilder(reader);
-                        readerBuilder.put(typeAndId[0], typeBuilder);
+                        readerBuilder.put(typeAndId[0].toUtf8(), typeBuilder);
                     }

                     HashedBytesArray idAsBytes = checkIfCanReuse(builders, typeAndId[1]);
@@ -148,7 +146,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                 continue;
             }

-            Map<BytesReference, TypeBuilder> readerBuilder = builders.get(reader.getCoreCacheKey());
+            Map<String, TypeBuilder> readerBuilder = builders.get(reader.getCoreCacheKey());

             Terms terms = reader.terms(ParentFieldMapper.NAME);
             if (terms == null) { // Should not happen
@@ -160,10 +158,10 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                 for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.term()) {
                     HashedBytesArray[] typeAndId = splitUidIntoTypeAndId(term);

-                    TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0]);
+                    TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0].toUtf8());
                     if (typeBuilder == null) {
                         typeBuilder = new TypeBuilder(reader);
-                        readerBuilder.put(typeAndId[0], typeBuilder);
+                        readerBuilder.put(typeAndId[0].toUtf8(), typeBuilder);
                     }

                     HashedBytesArray idAsBytes = checkIfCanReuse(builders, typeAndId[1]);
@@ -186,9 +184,9 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se


         // now, build it back
-        for (Map.Entry<Object, Map<BytesReference, TypeBuilder>> entry : builders.entrySet()) {
-            MapBuilder<BytesReference, SimpleIdReaderTypeCache> types = MapBuilder.newMapBuilder();
-            for (Map.Entry<BytesReference, TypeBuilder> typeBuilderEntry : entry.getValue().entrySet()) {
+        for (Map.Entry<Object, Map<String, TypeBuilder>> entry : builders.entrySet()) {
+            MapBuilder<String, SimpleIdReaderTypeCache> types = MapBuilder.newMapBuilder();
+            for (Map.Entry<String, TypeBuilder> typeBuilderEntry : entry.getValue().entrySet()) {
                 types.put(typeBuilderEntry.getKey(), new SimpleIdReaderTypeCache(typeBuilderEntry.getKey(),
                         typeBuilderEntry.getValue().idToDoc,
                         typeBuilderEntry.getValue().docToId,
@@ -210,7 +208,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
         return sizeInBytes;
     }

-    private HashedBytesArray checkIfCanReuse(Map<Object, Map<BytesReference, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
+    private HashedBytesArray checkIfCanReuse(Map<Object, Map<String, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
         HashedBytesArray finalIdAsBytes;
         // go over and see if we can reuse this id
         for (SimpleIdReaderCache idReaderCache : idReaders.values()) {
@@ -219,7 +217,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
                 return finalIdAsBytes;
             }
         }
-        for (Map<BytesReference, TypeBuilder> map : builders.values()) {
+        for (Map<String, TypeBuilder> map : builders.values()) {
             for (TypeBuilder typeBuilder : map.values()) {
                 finalIdAsBytes = typeBuilder.canReuse(idAsBytes);
                 if (finalIdAsBytes != null) {
org.elasticsearch.index.cache.id.simple.SimpleIdReaderCache

@@ -20,8 +20,6 @@
 package org.elasticsearch.index.cache.id.simple;

 import com.google.common.collect.ImmutableMap;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.index.cache.id.IdReaderCache;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@@ -33,9 +31,9 @@ public class SimpleIdReaderCache implements IdReaderCache {

     private final Object readerCacheKey;

-    private final ImmutableMap<BytesReference, SimpleIdReaderTypeCache> types;
+    private final ImmutableMap<String, SimpleIdReaderTypeCache> types;

-    public SimpleIdReaderCache(Object readerCacheKey, ImmutableMap<BytesReference, SimpleIdReaderTypeCache> types) {
+    public SimpleIdReaderCache(Object readerCacheKey, ImmutableMap<String, SimpleIdReaderTypeCache> types) {
         this.readerCacheKey = readerCacheKey;
         this.types = types;
     }
@@ -47,12 +45,12 @@ public class SimpleIdReaderCache implements IdReaderCache {

     @Override
     public IdReaderTypeCache type(String type) {
-        return types.get(new BytesArray(type));
+        return types.get(type);
     }

     @Override
     public HashedBytesArray parentIdByDoc(String type, int docId) {
-        SimpleIdReaderTypeCache typeCache = types.get(new BytesArray(type));
+        SimpleIdReaderTypeCache typeCache = types.get(type);
         if (typeCache != null) {
             return typeCache.parentIdByDoc(docId);
         }
@@ -61,7 +59,7 @@ public class SimpleIdReaderCache implements IdReaderCache {

     @Override
     public int docById(String type, HashedBytesArray id) {
-        SimpleIdReaderTypeCache typeCache = types.get(new BytesArray(type));
+        SimpleIdReaderTypeCache typeCache = types.get(type);
         if (typeCache != null) {
             return typeCache.docById(id);
         }
org.elasticsearch.index.cache.id.simple.SimpleIdReaderTypeCache

@@ -21,7 +21,6 @@ package org.elasticsearch.index.cache.id.simple;

 import gnu.trove.impl.hash.TObjectHash;
 import org.elasticsearch.common.RamUsage;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
 import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@@ -31,7 +30,7 @@ import org.elasticsearch.index.cache.id.IdReaderTypeCache;
  */
 public class SimpleIdReaderTypeCache implements IdReaderTypeCache {

-    private final BytesReference type;
+    private final String type;

     private final ExtTObjectIntHasMap<HashedBytesArray> idToDoc;

@@ -43,7 +42,7 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {

     private long sizeInBytes = -1;

-    public SimpleIdReaderTypeCache(BytesReference type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
+    public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
                                    HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
         this.type = type;
         this.idToDoc = idToDoc;
@@ -53,7 +52,7 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
         this.parentIdsOrdinals = parentIdsOrdinals;
     }

-    public BytesReference type() {
+    public String type() {
         return this.type;
     }

org.elasticsearch.index.search.child.ChildCollector

@@ -19,6 +19,8 @@

 package org.elasticsearch.index.search.child;

+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Scorer;
@@ -41,7 +43,7 @@ public class ChildCollector extends Collector {

     private final SearchContext context;

-    private final Tuple<IndexReader, IdReaderTypeCache>[] readers;
+    private final Tuple<AtomicReader, IdReaderTypeCache>[] readers;

     private final Map<Object, FixedBitSet> parentDocs;

@@ -53,10 +55,12 @@ public class ChildCollector extends Collector {
         this.parentDocs = new HashMap<Object, FixedBitSet>();

         // create a specific type map lookup for faster lookup operations per doc
-        this.readers = new Tuple[context.searcher().subReaders().length];
+        this.readers = new Tuple[context.searcher().getIndexReader().leaves().size()];
         for (int i = 0; i < readers.length; i++) {
-            IndexReader reader = context.searcher().subReaders()[i];
-            readers[i] = new Tuple<IndexReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
+            AtomicReaderContext readerContext = context.searcher().getIndexReader().leaves().get(i);
+            readers[i] = new Tuple<AtomicReader, IdReaderTypeCache>(
+                    readerContext.reader(), context.idCache().reader(readerContext.reader()).type(parentType)
+            );
         }
     }

@@ -75,14 +79,14 @@ public class ChildCollector extends Collector {
         if (parentId == null) {
             return;
         }
-        for (Tuple<IndexReader, IdReaderTypeCache> tuple : readers) {
+        for (Tuple<AtomicReader, IdReaderTypeCache> tuple : readers) {
             IndexReader indexReader = tuple.v1();
             IdReaderTypeCache idReaderTypeCache = tuple.v2();
             if (idReaderTypeCache == null) { // might be if we don't have that doc with that type in this reader
                 continue;
             }
             int parentDocId = idReaderTypeCache.docById(parentId);
-            if (parentDocId != -1 && !indexReader.isDeleted(parentDocId)) {
+            if (parentDocId != -1) {
                 FixedBitSet docIdSet = parentDocs().get(indexReader.getCoreCacheKey());
                 if (docIdSet == null) {
                     docIdSet = new FixedBitSet(indexReader.maxDoc());
@@ -95,8 +99,8 @@ public class ChildCollector extends Collector {
     }

     @Override
-    public void setNextReader(IndexReader reader, int docBase) throws IOException {
-        typeCache = context.idCache().reader(reader).type(parentType);
+    public void setNextReader(AtomicReaderContext readerContext) throws IOException {
+        typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
     }

     @Override
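For reference (not part of the commit), the pattern used above, replacing the old Searcher.subReaders() array with the searcher's top-level reader leaves, can be isolated into a small helper; the class and method names here are hypothetical.

import java.util.List;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;

// Hypothetical helper: collects the per-segment AtomicReaders of a top-level reader,
// which is what the old Searcher.subReaders() array used to provide.
public class LeafReaders {

    public static AtomicReader[] leafReaders(IndexReader topReader) {
        List<AtomicReaderContext> leaves = topReader.leaves();
        AtomicReader[] readers = new AtomicReader[leaves.size()];
        for (int i = 0; i < readers.length; i++) {
            readers[i] = leaves.get(i).reader();
        }
        return readers;
    }
}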
org.elasticsearch.index.search.child.HasChildFilter

@@ -20,11 +20,13 @@
 package org.elasticsearch.index.search.child;

 import gnu.trove.set.hash.THashSet;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.CacheRecycler;
@@ -106,13 +108,15 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
             parentDocs = null;
         }

-        public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
             if (parentDocs == null) {
                 throw new ElasticSearchIllegalStateException("has_child filter/query hasn't executed properly");
             }

+            // np need to use acceptDocs, since the parentDocs were collected with a collector, which means those
+            // collected docs are not deleted
             // ok to return null
-            return parentDocs.get(reader.getCoreCacheKey());
+            return parentDocs.get(context.reader().getCoreCacheKey());
         }

     }
@@ -138,14 +142,14 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
             collectedUids = ((UidCollector) collector).collectedUids;
         }

-        public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+        public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
             if (collectedUids == null) {
                 throw new ElasticSearchIllegalStateException("has_child filter/query hasn't executed properly");
             }

-            IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(reader).type(parentType);
+            IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
             if (idReaderTypeCache != null) {
-                return new ParentDocSet(reader, collectedUids, idReaderTypeCache);
+                return new ParentDocSet(context.reader(), collectedUids, idReaderTypeCache, acceptDocs);
             } else {
                 return null;
             }
@@ -163,16 +167,18 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
         final IndexReader reader;
         final THashSet<HashedBytesArray> parents;
         final IdReaderTypeCache typeCache;
+        final Bits acceptDocs;

-        ParentDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
+        ParentDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache, Bits acceptDocs) {
             super(reader.maxDoc());
             this.reader = reader;
             this.parents = parents;
             this.typeCache = typeCache;
+            this.acceptDocs = acceptDocs;
         }

         public boolean get(int doc) {
-            return !reader.isDeleted(doc) && parents.contains(typeCache.idByDoc(doc));
+            return !acceptDocs.get(doc) && parents.contains(typeCache.idByDoc(doc));
         }
     }

@@ -196,8 +202,8 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
         }

         @Override
-        public void setNextReader(IndexReader reader, int docBase) throws IOException {
-            typeCache = context.idCache().reader(reader).type(parentType);
+        public void setNextReader(AtomicReaderContext readerContext) throws IOException {
+            typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
         }
     }
 }
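The getDocIdSet() change above follows the general Lucene 4 Filter contract: the filter is asked per segment (AtomicReaderContext) and receives a Bits of accepted documents that replaces the old IndexReader.isDeleted() checks. A minimal, purely illustrative filter against that contract (not part of this commit, and assuming the usual convention that a null acceptDocs means every document may be returned) could look like this.

import java.io.IOException;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

// Illustrative only: accepts every document the caller allows via acceptDocs.
public class MatchAllAcceptedDocsFilter extends Filter {

    @Override
    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
        int maxDoc = context.reader().maxDoc();
        FixedBitSet result = new FixedBitSet(maxDoc);   // FixedBitSet doubles as a DocIdSet here
        for (int doc = 0; doc < maxDoc; doc++) {
            if (acceptDocs == null || acceptDocs.get(doc)) {
                result.set(doc);
            }
        }
        return result;
    }
}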
org.elasticsearch.index.search.child.HasParentFilter

@@ -20,11 +20,14 @@
 package org.elasticsearch.index.search.child;

 import gnu.trove.set.hash.THashSet;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.ElasticSearchIllegalStateException;
 import org.elasticsearch.common.CacheRecycler;
@@ -104,14 +107,14 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
             parents = ((ParentUidsCollector) collector).collectedUids;
         }

-        public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+        public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
             if (parents == null) {
                 throw new ElasticSearchIllegalStateException("has_parent filter/query hasn't executed properly");
             }

-            IdReaderTypeCache idReaderTypeCache = context.idCache().reader(reader).type(parentType);
+            IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
             if (idReaderTypeCache != null) {
-                return new ChildrenDocSet(reader, parents, idReaderTypeCache);
+                return new ChildrenDocSet(readerContext.reader(), parents, idReaderTypeCache, acceptDocs);
             } else {
                 return null;
             }
@@ -129,16 +132,18 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
         final IndexReader reader;
         final THashSet<HashedBytesArray> parents;
         final IdReaderTypeCache idReaderTypeCache;
+        final Bits acceptDocs;

-        ChildrenDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
+        ChildrenDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache, Bits acceptDocs) {
             super(reader.maxDoc());
             this.reader = reader;
             this.parents = parents;
             this.idReaderTypeCache = idReaderTypeCache;
+            this.acceptDocs = acceptDocs;
         }

         public boolean get(int doc) {
-            return !reader.isDeleted(doc) && parents.contains(idReaderTypeCache.parentIdByDoc(doc));
+            return !acceptDocs.get(doc) && parents.contains(idReaderTypeCache.parentIdByDoc(doc));
         }

     }
@@ -161,8 +166,9 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
             collectedUids.add(typeCache.idByDoc(doc));
         }

-        public void setNextReader(IndexReader reader, int docBase) throws IOException {
-            typeCache = context.idCache().reader(reader).type(parentType);
+        @Override
+        public void setNextReader(AtomicReaderContext readerContext) throws IOException {
+            typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
         }
     }

@@ -188,12 +194,12 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
             parentDocs = ((ParentDocsCollector) collector).segmentResults;
         }

-        public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+        public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
             if (parentDocs == null) {
                 throw new ElasticSearchIllegalStateException("has_parent filter/query hasn't executed properly");
             }

-            return new ChildrenDocSet(reader, parentDocs, context, parentType);
+            return new ChildrenDocSet(readerContext.reader(), parentDocs, context, parentType, acceptDocs);
         }

         public void clear() {
@@ -203,25 +209,27 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
     static class ChildrenDocSet extends GetDocSet {

         final IdReaderTypeCache currentTypeCache;
-        final IndexReader currentReader;
-        final Tuple<IndexReader, IdReaderTypeCache>[] readersToTypeCache;
+        final AtomicReader currentReader;
+        final Tuple<AtomicReader, IdReaderTypeCache>[] readersToTypeCache;
         final Map<Object, FixedBitSet> parentDocs;
+        final Bits acceptDocs;

-        ChildrenDocSet(IndexReader currentReader, Map<Object, FixedBitSet> parentDocs,
-                       SearchContext context, String parentType) {
+        ChildrenDocSet(AtomicReader currentReader, Map<Object, FixedBitSet> parentDocs,
+                       SearchContext context, String parentType, Bits acceptDocs) {
             super(currentReader.maxDoc());
+            this.acceptDocs = acceptDocs;
             this.currentTypeCache = context.idCache().reader(currentReader).type(parentType);
             this.currentReader = currentReader;
             this.parentDocs = parentDocs;
-            this.readersToTypeCache = new Tuple[context.searcher().subReaders().length];
+            this.readersToTypeCache = new Tuple[context.searcher().getIndexReader().leaves().size()];
             for (int i = 0; i < readersToTypeCache.length; i++) {
-                IndexReader reader = context.searcher().subReaders()[i];
-                readersToTypeCache[i] = new Tuple<IndexReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
+                AtomicReader reader = context.searcher().getIndexReader().leaves().get(i).reader();
+                readersToTypeCache[i] = new Tuple<AtomicReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
             }
         }

         public boolean get(int doc) {
-            if (currentReader.isDeleted(doc) || doc == -1) {
+            if (acceptDocs.get(doc) || doc == -1) {
                 return false;
             }

@@ -230,7 +238,7 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
                 return false;
             }

-            for (Tuple<IndexReader, IdReaderTypeCache> readerTypeCacheTuple : readersToTypeCache) {
+            for (Tuple<AtomicReader, IdReaderTypeCache> readerTypeCacheTuple : readersToTypeCache) {
                 int parentDocId = readerTypeCacheTuple.v2().docById(parentId);
                 if (parentDocId == -1) {
                     continue;
@@ -254,8 +262,9 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
             current.set(doc);
         }

-        public void setNextReader(IndexReader reader, int docBase) throws IOException {
-            segmentResults.put(reader.getCoreCacheKey(), current = new FixedBitSet(reader.maxDoc()));
+        @Override
+        public void setNextReader(AtomicReaderContext context) throws IOException {
+            segmentResults.put(context.reader().getCoreCacheKey(), current = new FixedBitSet(context.reader().maxDoc()));
         }
     }
 }
org.elasticsearch.index.search.child.TopChildrenQuery

@@ -20,9 +20,9 @@
 package org.elasticsearch.index.search.child;

 import gnu.trove.map.hash.TIntObjectHashMap;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
 import org.elasticsearch.ElasticSearchIllegalStateException;
@@ -124,20 +124,21 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
     public void processResults(TopDocs topDocs, SearchContext context) {
         Map<Object, TIntObjectHashMap<ParentDoc>> parentDocsPerReader = new HashMap<Object, TIntObjectHashMap<ParentDoc>>();
         for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
-            int readerIndex = context.searcher().readerIndex(scoreDoc.doc);
-            IndexReader subReader = context.searcher().subReaders()[readerIndex];
-            int subDoc = scoreDoc.doc - context.searcher().docStarts()[readerIndex];
+            int readerIndex = ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves());
+            AtomicReaderContext subContext = context.searcher().getIndexReader().leaves().get(readerIndex);
+            int subDoc = scoreDoc.doc - subContext.docBase;

             // find the parent id
-            HashedBytesArray parentId = context.idCache().reader(subReader).parentIdByDoc(parentType, subDoc);
+            HashedBytesArray parentId = context.idCache().reader(subContext.reader()).parentIdByDoc(parentType, subDoc);
             if (parentId == null) {
                 // no parent found
                 continue;
             }
             // now go over and find the parent doc Id and reader tuple
-            for (IndexReader indexReader : context.searcher().subReaders()) {
+            for (AtomicReaderContext atomicReaderContext : context.searcher().getIndexReader().leaves()) {
+                AtomicReader indexReader = atomicReaderContext.reader();
                 int parentDocId = context.idCache().reader(indexReader).docById(parentType, parentId);
-                if (parentDocId != -1 && !indexReader.isDeleted(parentDocId)) {
+                if (parentDocId != -1 && !indexReader.getLiveDocs().get(parentDocId)) {
                     // we found a match, add it and break

                     TIntObjectHashMap<ParentDoc> readerParentDocs = parentDocsPerReader.get(indexReader.getCoreCacheKey());
@@ -205,15 +206,15 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
     }

     @Override
-    public Weight createWeight(Searcher searcher) throws IOException {
+    public Weight createWeight(IndexSearcher searcher) throws IOException {
         if (!properlyInvoked) {
             throw new ElasticSearchIllegalStateException("top_children query hasn't executed properly");
         }

         if (parentDocs != null) {
-            return new ParentWeight(searcher, query.weight(searcher));
+            return new ParentWeight(searcher, query.createWeight(searcher));
         }
-        return query.weight(searcher);
+        return query.createWeight(searcher);
     }

     public String toString(String field) {
@@ -225,11 +226,11 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {

     class ParentWeight extends Weight {

-        final Searcher searcher;
+        final IndexSearcher searcher;

         final Weight queryWeight;

-        public ParentWeight(Searcher searcher, Weight queryWeight) throws IOException {
+        public ParentWeight(IndexSearcher searcher, Weight queryWeight) throws IOException {
             this.searcher = searcher;
             this.queryWeight = queryWeight;
         }
@@ -243,28 +244,28 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
         }

         @Override
-        public float sumOfSquaredWeights() throws IOException {
-            float sum = queryWeight.sumOfSquaredWeights();
+        public float getValueForNormalization() throws IOException {
+            float sum = queryWeight.getValueForNormalization();
             sum *= getBoost() * getBoost();
             return sum;
         }

         @Override
-        public void normalize(float norm) {
-            // nothing to do here....
+        public void normalize(float norm, float topLevelBoost) {
+            // Nothing to normalize
         }

         @Override
-        public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder, boolean topScorer) throws IOException {
-            ParentDoc[] readerParentDocs = parentDocs.get(reader.getCoreCacheKey());
+        public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
+            ParentDoc[] readerParentDocs = parentDocs.get(context.reader().getCoreCacheKey());
             if (readerParentDocs != null) {
-                return new ParentScorer(getSimilarity(searcher), readerParentDocs);
+                return new ParentScorer(this, readerParentDocs);
             }
-            return new EmptyScorer(getSimilarity(searcher));
+            return new EmptyScorer(this);
         }

         @Override
-        public Explanation explain(IndexReader reader, int doc) throws IOException {
+        public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
             return new Explanation(getBoost(), "not implemented yet...");
         }
     }
@@ -275,8 +276,8 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {

         private int index = -1;

-        private ParentScorer(Similarity similarity, ParentDoc[] docs) throws IOException {
-            super(similarity);
+        private ParentScorer(ParentWeight weight, ParentDoc[] docs) throws IOException {
+            super(weight);
             this.docs = docs;
         }

@@ -315,5 +316,10 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
             }
             throw new ElasticSearchIllegalStateException("No support for score type [" + scoreType + "]");
         }
+
+        @Override
+        public float freq() throws IOException {
+            return docs[index].count; // The number of matches in the child doc, which is propagated to parent
+        }
     }
 }
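The processResults() hunk above shows the Lucene 4 way of mapping a top-level doc id back to a segment: ReaderUtil.subIndex() finds the leaf, and the leaf's docBase converts the global id into a segment-local one, replacing the old Searcher.readerIndex()/docStarts() pair. A small, purely illustrative helper (not part of the commit, names hypothetical):

import java.util.List;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.ReaderUtil;

// Hypothetical helper: resolve a top-level doc id to its segment and segment-local id.
public class DocResolver {

    public static void resolve(IndexReader topReader, int topLevelDoc) {
        List<AtomicReaderContext> leaves = topReader.leaves();
        int readerIndex = ReaderUtil.subIndex(topLevelDoc, leaves); // which leaf holds the doc
        AtomicReaderContext leaf = leaves.get(readerIndex);
        int segmentDoc = topLevelDoc - leaf.docBase;                // id relative to that leaf
        System.out.println("leaf=" + readerIndex + " segmentDoc=" + segmentDoc
                + " maxDoc=" + leaf.reader().maxDoc());
    }
}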