lucene 4: upgraded o.e.index.search.child package

Authored by Martijn van Groningen on 2012-10-26 19:53:23 +02:00; committed by Shay Banon
parent 71c3bd7c64
commit 19ab1d0548
10 changed files with 120 additions and 92 deletions


@@ -20,7 +20,7 @@
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.search.Weight;
import java.io.IOException;
@@ -29,8 +29,8 @@ import java.io.IOException;
*/
public class EmptyScorer extends Scorer {
public EmptyScorer(Similarity similarity) {
super(similarity);
public EmptyScorer(Weight weight) {
super(weight);
}
@Override
@@ -38,6 +38,11 @@ public class EmptyScorer extends Scorer {
return 0;
}
@Override
public float freq() throws IOException {
return 0;
}
@Override
public int docID() {
return NO_MORE_DOCS;
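
Reassembled for reference, the upgraded scorer plausibly looks like the sketch below: in Lucene 4 a Scorer is constructed from its Weight instead of a Similarity, and freq() is part of the Scorer contract alongside score() and docID(). The nextDoc()/advance() bodies are assumptions (they are not shown in this hunk), and the class is a sketch rather than the exact committed file.

package org.elasticsearch.common.lucene.search;

import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

import java.io.IOException;

// Sketch of the post-upgrade scorer: the Weight replaces the old Similarity
// constructor argument, and freq() is now part of the Scorer API.
public class EmptyScorer extends Scorer {

    public EmptyScorer(Weight weight) {
        super(weight);
    }

    @Override
    public float score() throws IOException {
        return 0;
    }

    @Override
    public float freq() throws IOException {
        return 0;
    }

    @Override
    public int docID() {
        return NO_MORE_DOCS;
    }

    @Override
    public int nextDoc() throws IOException {
        return NO_MORE_DOCS; // assumed: an empty scorer never matches
    }

    @Override
    public int advance(int target) throws IOException {
        return NO_MORE_DOCS; // assumed: nothing to advance to
    }
}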


@@ -19,6 +19,7 @@
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
@@ -41,7 +42,8 @@ public class NoopCollector extends Collector {
}
@Override
public void setNextReader(IndexReader reader, int docBase) throws IOException {
public void setNextReader(AtomicReaderContext context) throws IOException {
throw new UnsupportedOperationException();
}
@Override
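
The same per-segment change runs through every collector in this commit: setNextReader(IndexReader, int docBase) becomes setNextReader(AtomicReaderContext), with the doc base now carried on the context. Below is a generic sketch of the Lucene 4 Collector contract; CountingCollector is a hypothetical example, not code from this commit.

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;

import java.io.IOException;

// Generic sketch of a Lucene 4 collector: readers are handed over per segment
// via AtomicReaderContext, and the per-segment doc base is context.docBase.
public class CountingCollector extends Collector {

    private int count;
    private int docBase;

    @Override
    public void setScorer(Scorer scorer) throws IOException {
        // scores are not needed for counting
    }

    @Override
    public void collect(int doc) throws IOException {
        count++; // doc is segment-relative; docBase + doc is the top-level doc id
    }

    @Override
    public void setNextReader(AtomicReaderContext context) throws IOException {
        docBase = context.docBase; // replaces the old (IndexReader, int docBase) signature
    }

    @Override
    public boolean acceptsDocsOutOfOrder() {
        return true;
    }

    public int count() {
        return count;
    }
}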


@@ -19,6 +19,7 @@
package org.elasticsearch.index.cache.id;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.component.CloseableComponent;
@@ -37,7 +38,7 @@ public interface IdCache extends IndexComponent, CloseableComponent, Iterable<Id
void refresh(List<AtomicReaderContext> readers) throws Exception;
IdReaderCache reader(IndexReader reader);
IdReaderCache reader(AtomicReader reader);
long sizeInBytes();
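
With the interface keyed on AtomicReader, callers resolve the id cache once per index leaf. A hypothetical usage sketch follows; IdCacheLeafLookup and its arguments are illustrative, assuming an IndexSearcher and an IdCache instance.

import java.util.List;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.cache.id.IdReaderCache;

// Hypothetical helper: with Lucene 4 the id cache is looked up per leaf (segment)
// via its AtomicReader instead of a top-level IndexReader.
class IdCacheLeafLookup {

    static void visitLeaves(IndexSearcher searcher, IdCache idCache) {
        List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
        for (AtomicReaderContext leaf : leaves) {
            IdReaderCache perSegmentCache = idCache.reader(leaf.reader());
            // ... per-segment id lookups go here ...
        }
    }
}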


@@ -24,7 +24,6 @@ import org.apache.lucene.index.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
@@ -35,7 +34,6 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.cache.id.IdReaderCache;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
@@ -77,7 +75,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
}
@Override
public IdReaderCache reader(IndexReader reader) {
public IdReaderCache reader(AtomicReader reader) {
return idReaders.get(reader.getCoreCacheKey());
}
@@ -98,7 +96,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
}
// do the refresh
Map<Object, Map<BytesReference, TypeBuilder>> builders = new HashMap<Object, Map<BytesReference, TypeBuilder>>();
Map<Object, Map<String, TypeBuilder>> builders = new HashMap<Object, Map<String, TypeBuilder>>();
// first, go over and load all the id->doc map for all types
for (AtomicReaderContext context : atomicReaderContexts) {
@@ -111,7 +109,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
if (reader instanceof SegmentReader) {
((SegmentReader) reader).addCoreClosedListener(this);
}
Map<BytesReference, TypeBuilder> readerBuilder = new HashMap<BytesReference, TypeBuilder>();
Map<String, TypeBuilder> readerBuilder = new HashMap<String, TypeBuilder>();
builders.put(reader.getCoreCacheKey(), readerBuilder);
@@ -124,10 +122,10 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
DocsEnum docsEnum = null;
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.term()) {
HashedBytesArray[] typeAndId = splitUidIntoTypeAndId(term);
TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0]);
TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0].toUtf8());
if (typeBuilder == null) {
typeBuilder = new TypeBuilder(reader);
readerBuilder.put(typeAndId[0], typeBuilder);
readerBuilder.put(typeAndId[0].toUtf8(), typeBuilder);
}
HashedBytesArray idAsBytes = checkIfCanReuse(builders, typeAndId[1]);
@@ -148,7 +146,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
continue;
}
Map<BytesReference, TypeBuilder> readerBuilder = builders.get(reader.getCoreCacheKey());
Map<String, TypeBuilder> readerBuilder = builders.get(reader.getCoreCacheKey());
Terms terms = reader.terms(ParentFieldMapper.NAME);
if (terms == null) { // Should not happen
@@ -160,10 +158,10 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.term()) {
HashedBytesArray[] typeAndId = splitUidIntoTypeAndId(term);
TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0]);
TypeBuilder typeBuilder = readerBuilder.get(typeAndId[0].toUtf8());
if (typeBuilder == null) {
typeBuilder = new TypeBuilder(reader);
readerBuilder.put(typeAndId[0], typeBuilder);
readerBuilder.put(typeAndId[0].toUtf8(), typeBuilder);
}
HashedBytesArray idAsBytes = checkIfCanReuse(builders, typeAndId[1]);
@@ -186,9 +184,9 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
// now, build it back
for (Map.Entry<Object, Map<BytesReference, TypeBuilder>> entry : builders.entrySet()) {
MapBuilder<BytesReference, SimpleIdReaderTypeCache> types = MapBuilder.newMapBuilder();
for (Map.Entry<BytesReference, TypeBuilder> typeBuilderEntry : entry.getValue().entrySet()) {
for (Map.Entry<Object, Map<String, TypeBuilder>> entry : builders.entrySet()) {
MapBuilder<String, SimpleIdReaderTypeCache> types = MapBuilder.newMapBuilder();
for (Map.Entry<String, TypeBuilder> typeBuilderEntry : entry.getValue().entrySet()) {
types.put(typeBuilderEntry.getKey(), new SimpleIdReaderTypeCache(typeBuilderEntry.getKey(),
typeBuilderEntry.getValue().idToDoc,
typeBuilderEntry.getValue().docToId,
@@ -210,7 +208,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
return sizeInBytes;
}
private HashedBytesArray checkIfCanReuse(Map<Object, Map<BytesReference, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
private HashedBytesArray checkIfCanReuse(Map<Object, Map<String, TypeBuilder>> builders, HashedBytesArray idAsBytes) {
HashedBytesArray finalIdAsBytes;
// go over and see if we can reuse this id
for (SimpleIdReaderCache idReaderCache : idReaders.values()) {
@@ -219,7 +217,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
return finalIdAsBytes;
}
}
for (Map<BytesReference, TypeBuilder> map : builders.values()) {
for (Map<String, TypeBuilder> map : builders.values()) {
for (TypeBuilder typeBuilder : map.values()) {
finalIdAsBytes = typeBuilder.canReuse(idAsBytes);
if (finalIdAsBytes != null) {
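
For comparison, here is the standard Lucene 4 idiom for walking a field's terms and postings inside one segment, which is the pattern the refresh loop above relies on. TermsWalk is a generic sketch, not the cache-building code itself.

import java.io.IOException;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;

// Generic sketch of the Lucene 4 per-segment terms/postings iteration idiom.
class TermsWalk {

    static void walk(AtomicReader reader, String field) throws IOException {
        Terms terms = reader.terms(field);
        if (terms == null) {
            return; // field not present in this segment
        }
        TermsEnum termsEnum = terms.iterator(null);
        DocsEnum docsEnum = null;
        for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
            docsEnum = termsEnum.docs(reader.getLiveDocs(), docsEnum);
            for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
                // term -> doc pair available here
            }
        }
    }
}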


@@ -20,8 +20,6 @@
package org.elasticsearch.index.cache.id.simple;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.index.cache.id.IdReaderCache;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@@ -33,9 +31,9 @@ public class SimpleIdReaderCache implements IdReaderCache {
private final Object readerCacheKey;
private final ImmutableMap<BytesReference, SimpleIdReaderTypeCache> types;
private final ImmutableMap<String, SimpleIdReaderTypeCache> types;
public SimpleIdReaderCache(Object readerCacheKey, ImmutableMap<BytesReference, SimpleIdReaderTypeCache> types) {
public SimpleIdReaderCache(Object readerCacheKey, ImmutableMap<String, SimpleIdReaderTypeCache> types) {
this.readerCacheKey = readerCacheKey;
this.types = types;
}
@@ -47,12 +45,12 @@ public class SimpleIdReaderCache implements IdReaderCache {
@Override
public IdReaderTypeCache type(String type) {
return types.get(new BytesArray(type));
return types.get(type);
}
@Override
public HashedBytesArray parentIdByDoc(String type, int docId) {
SimpleIdReaderTypeCache typeCache = types.get(new BytesArray(type));
SimpleIdReaderTypeCache typeCache = types.get(type);
if (typeCache != null) {
return typeCache.parentIdByDoc(docId);
}
@@ -61,7 +59,7 @@ public class SimpleIdReaderCache implements IdReaderCache {
@Override
public int docById(String type, HashedBytesArray id) {
SimpleIdReaderTypeCache typeCache = types.get(new BytesArray(type));
SimpleIdReaderTypeCache typeCache = types.get(type);
if (typeCache != null) {
return typeCache.docById(id);
}


@@ -21,7 +21,6 @@ package org.elasticsearch.index.cache.id.simple;
import gnu.trove.impl.hash.TObjectHash;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@@ -31,7 +30,7 @@ import org.elasticsearch.index.cache.id.IdReaderTypeCache;
*/
public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
private final BytesReference type;
private final String type;
private final ExtTObjectIntHasMap<HashedBytesArray> idToDoc;
@@ -43,7 +42,7 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
private long sizeInBytes = -1;
public SimpleIdReaderTypeCache(BytesReference type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<HashedBytesArray> idToDoc, HashedBytesArray[] docIdToId,
HashedBytesArray[] parentIdsValues, int[] parentIdsOrdinals) {
this.type = type;
this.idToDoc = idToDoc;
@@ -53,7 +52,7 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
this.parentIdsOrdinals = parentIdsOrdinals;
}
public BytesReference type() {
public String type() {
return this.type;
}


@@ -19,6 +19,8 @@
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Scorer;
@@ -41,7 +43,7 @@ public class ChildCollector extends Collector {
private final SearchContext context;
private final Tuple<IndexReader, IdReaderTypeCache>[] readers;
private final Tuple<AtomicReader, IdReaderTypeCache>[] readers;
private final Map<Object, FixedBitSet> parentDocs;
@@ -53,10 +55,12 @@ public class ChildCollector extends Collector {
this.parentDocs = new HashMap<Object, FixedBitSet>();
// create a specific type map lookup for faster lookup operations per doc
this.readers = new Tuple[context.searcher().subReaders().length];
this.readers = new Tuple[context.searcher().getIndexReader().leaves().size()];
for (int i = 0; i < readers.length; i++) {
IndexReader reader = context.searcher().subReaders()[i];
readers[i] = new Tuple<IndexReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
AtomicReaderContext readerContext = context.searcher().getIndexReader().leaves().get(i);
readers[i] = new Tuple<AtomicReader, IdReaderTypeCache>(
readerContext.reader(), context.idCache().reader(readerContext.reader()).type(parentType)
);
}
}
@@ -75,14 +79,14 @@ public class ChildCollector extends Collector {
if (parentId == null) {
return;
}
for (Tuple<IndexReader, IdReaderTypeCache> tuple : readers) {
for (Tuple<AtomicReader, IdReaderTypeCache> tuple : readers) {
IndexReader indexReader = tuple.v1();
IdReaderTypeCache idReaderTypeCache = tuple.v2();
if (idReaderTypeCache == null) { // might be if we don't have that doc with that type in this reader
continue;
}
int parentDocId = idReaderTypeCache.docById(parentId);
if (parentDocId != -1 && !indexReader.isDeleted(parentDocId)) {
if (parentDocId != -1) {
FixedBitSet docIdSet = parentDocs().get(indexReader.getCoreCacheKey());
if (docIdSet == null) {
docIdSet = new FixedBitSet(indexReader.maxDoc());
@@ -95,8 +99,8 @@
}
@Override
public void setNextReader(IndexReader reader, int docBase) throws IOException {
typeCache = context.idCache().reader(reader).type(parentType);
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
}
@Override
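
The dropped isDeleted() check reflects that AtomicReader has no isDeleted(); in Lucene 4, deletions are exposed through getLiveDocs(), which returns null when a segment has no deletions and whose set bits mark live documents. A minimal null-safe equivalent of the old !reader.isDeleted(doc) test is sketched below; LiveDocsCheck is an illustrative name, not part of this commit.

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.util.Bits;

// Sketch: Lucene 4 replacement for the Lucene 3 "!reader.isDeleted(doc)" test.
// getLiveDocs() returns null when the segment has no deletions; a set bit
// means the document is live.
final class LiveDocsCheck {

    static boolean isLive(AtomicReader reader, int doc) {
        Bits liveDocs = reader.getLiveDocs();
        return liveDocs == null || liveDocs.get(doc);
    }
}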


@@ -20,11 +20,13 @@
package org.elasticsearch.index.search.child;
import gnu.trove.set.hash.THashSet;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
@@ -106,13 +108,15 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
parentDocs = null;
}
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (parentDocs == null) {
throw new ElasticSearchIllegalStateException("has_child filter/query hasn't executed properly");
}
// no need to use acceptDocs, since the parentDocs were collected with a collector, which means those
// collected docs are not deleted
// ok to return null
return parentDocs.get(reader.getCoreCacheKey());
return parentDocs.get(context.reader().getCoreCacheKey());
}
}
@@ -138,14 +142,14 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
collectedUids = ((UidCollector) collector).collectedUids;
}
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (collectedUids == null) {
throw new ElasticSearchIllegalStateException("has_child filter/query hasn't executed properly");
}
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(reader).type(parentType);
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
return new ParentDocSet(reader, collectedUids, idReaderTypeCache);
return new ParentDocSet(context.reader(), collectedUids, idReaderTypeCache, acceptDocs);
} else {
return null;
}
@@ -163,16 +167,18 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
final IndexReader reader;
final THashSet<HashedBytesArray> parents;
final IdReaderTypeCache typeCache;
final Bits acceptDocs;
ParentDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
ParentDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache, Bits acceptDocs) {
super(reader.maxDoc());
this.reader = reader;
this.parents = parents;
this.typeCache = typeCache;
this.acceptDocs = acceptDocs;
}
public boolean get(int doc) {
return !reader.isDeleted(doc) && parents.contains(typeCache.idByDoc(doc));
return !acceptDocs.get(doc) && parents.contains(typeCache.idByDoc(doc));
}
}
@@ -196,8 +202,8 @@ public abstract class HasChildFilter extends Filter implements ScopePhase.Collec
}
@Override
public void setNextReader(IndexReader reader, int docBase) throws IOException {
typeCache = context.idCache().reader(reader).type(parentType);
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
}
}
}
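
Filters move the same way as collectors: getDocIdSet(IndexReader) becomes getDocIdSet(AtomicReaderContext, Bits acceptDocs), where acceptDocs may be null (everything accepted) and a set bit marks a document the caller is willing to see. Below is a bare-bones sketch of a Lucene 4 filter backed by precomputed per-segment bitsets, similar in shape to the one above; PrecomputedBitSetFilter is an illustrative name, not part of this commit.

import java.io.IOException;
import java.util.Map;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

// Generic sketch of a Lucene 4 filter backed by per-segment bitsets keyed on
// the segment's core cache key. acceptDocs may be null; a set bit means the
// document is acceptable to the caller. If the cached sets can contain docs
// the caller must not see, they should be intersected with acceptDocs.
class PrecomputedBitSetFilter extends Filter {

    private final Map<Object, FixedBitSet> perSegmentBits; // assumed to be filled elsewhere

    PrecomputedBitSetFilter(Map<Object, FixedBitSet> perSegmentBits) {
        this.perSegmentBits = perSegmentBits;
    }

    @Override
    public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
        // Returning null is allowed and means "no documents" for this segment.
        return perSegmentBits.get(context.reader().getCoreCacheKey());
    }
}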


@@ -20,11 +20,14 @@
package org.elasticsearch.index.search.child;
import gnu.trove.set.hash.THashSet;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
@@ -104,14 +107,14 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
parents = ((ParentUidsCollector) collector).collectedUids;
}
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
if (parents == null) {
throw new ElasticSearchIllegalStateException("has_parent filter/query hasn't executed properly");
}
IdReaderTypeCache idReaderTypeCache = context.idCache().reader(reader).type(parentType);
IdReaderTypeCache idReaderTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
if (idReaderTypeCache != null) {
return new ChildrenDocSet(reader, parents, idReaderTypeCache);
return new ChildrenDocSet(readerContext.reader(), parents, idReaderTypeCache, acceptDocs);
} else {
return null;
}
@@ -129,16 +132,18 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
final IndexReader reader;
final THashSet<HashedBytesArray> parents;
final IdReaderTypeCache idReaderTypeCache;
final Bits acceptDocs;
ChildrenDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
ChildrenDocSet(IndexReader reader, THashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache, Bits acceptDocs) {
super(reader.maxDoc());
this.reader = reader;
this.parents = parents;
this.idReaderTypeCache = idReaderTypeCache;
this.acceptDocs = acceptDocs;
}
public boolean get(int doc) {
return !reader.isDeleted(doc) && parents.contains(idReaderTypeCache.parentIdByDoc(doc));
return !acceptDocs.get(doc) && parents.contains(idReaderTypeCache.parentIdByDoc(doc));
}
}
@@ -161,8 +166,9 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
collectedUids.add(typeCache.idByDoc(doc));
}
public void setNextReader(IndexReader reader, int docBase) throws IOException {
typeCache = context.idCache().reader(reader).type(parentType);
@Override
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
}
}
@@ -188,12 +194,12 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
parentDocs = ((ParentDocsCollector) collector).segmentResults;
}
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs) throws IOException {
if (parentDocs == null) {
throw new ElasticSearchIllegalStateException("has_parent filter/query hasn't executed properly");
}
return new ChildrenDocSet(reader, parentDocs, context, parentType);
return new ChildrenDocSet(readerContext.reader(), parentDocs, context, parentType, acceptDocs);
}
public void clear() {
@@ -203,25 +209,27 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
static class ChildrenDocSet extends GetDocSet {
final IdReaderTypeCache currentTypeCache;
final IndexReader currentReader;
final Tuple<IndexReader, IdReaderTypeCache>[] readersToTypeCache;
final AtomicReader currentReader;
final Tuple<AtomicReader, IdReaderTypeCache>[] readersToTypeCache;
final Map<Object, FixedBitSet> parentDocs;
final Bits acceptDocs;
ChildrenDocSet(IndexReader currentReader, Map<Object, FixedBitSet> parentDocs,
SearchContext context, String parentType) {
ChildrenDocSet(AtomicReader currentReader, Map<Object, FixedBitSet> parentDocs,
SearchContext context, String parentType, Bits acceptDocs) {
super(currentReader.maxDoc());
this.acceptDocs = acceptDocs;
this.currentTypeCache = context.idCache().reader(currentReader).type(parentType);
this.currentReader = currentReader;
this.parentDocs = parentDocs;
this.readersToTypeCache = new Tuple[context.searcher().subReaders().length];
this.readersToTypeCache = new Tuple[context.searcher().getIndexReader().leaves().size()];
for (int i = 0; i < readersToTypeCache.length; i++) {
IndexReader reader = context.searcher().subReaders()[i];
readersToTypeCache[i] = new Tuple<IndexReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
AtomicReader reader = context.searcher().getIndexReader().leaves().get(i).reader();
readersToTypeCache[i] = new Tuple<AtomicReader, IdReaderTypeCache>(reader, context.idCache().reader(reader).type(parentType));
}
}
public boolean get(int doc) {
if (currentReader.isDeleted(doc) || doc == -1) {
if (acceptDocs.get(doc) || doc == -1) {
return false;
}
@@ -230,7 +238,7 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
return false;
}
for (Tuple<IndexReader, IdReaderTypeCache> readerTypeCacheTuple : readersToTypeCache) {
for (Tuple<AtomicReader, IdReaderTypeCache> readerTypeCacheTuple : readersToTypeCache) {
int parentDocId = readerTypeCacheTuple.v2().docById(parentId);
if (parentDocId == -1) {
continue;
@@ -254,8 +262,9 @@ public abstract class HasParentFilter extends Filter implements ScopePhase.Colle
current.set(doc);
}
public void setNextReader(IndexReader reader, int docBase) throws IOException {
segmentResults.put(reader.getCoreCacheKey(), current = new FixedBitSet(reader.maxDoc()));
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
segmentResults.put(context.reader().getCoreCacheKey(), current = new FixedBitSet(context.reader().maxDoc()));
}
}
}


@@ -20,9 +20,9 @@
package org.elasticsearch.index.search.child;
import gnu.trove.map.hash.TIntObjectHashMap;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchIllegalStateException;
@@ -124,20 +124,21 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
public void processResults(TopDocs topDocs, SearchContext context) {
Map<Object, TIntObjectHashMap<ParentDoc>> parentDocsPerReader = new HashMap<Object, TIntObjectHashMap<ParentDoc>>();
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
int readerIndex = context.searcher().readerIndex(scoreDoc.doc);
IndexReader subReader = context.searcher().subReaders()[readerIndex];
int subDoc = scoreDoc.doc - context.searcher().docStarts()[readerIndex];
int readerIndex = ReaderUtil.subIndex(scoreDoc.doc, context.searcher().getIndexReader().leaves());
AtomicReaderContext subContext = context.searcher().getIndexReader().leaves().get(readerIndex);
int subDoc = scoreDoc.doc - subContext.docBase;
// find the parent id
HashedBytesArray parentId = context.idCache().reader(subReader).parentIdByDoc(parentType, subDoc);
HashedBytesArray parentId = context.idCache().reader(subContext.reader()).parentIdByDoc(parentType, subDoc);
if (parentId == null) {
// no parent found
continue;
}
// now go over and find the parent doc Id and reader tuple
for (IndexReader indexReader : context.searcher().subReaders()) {
for (AtomicReaderContext atomicReaderContext : context.searcher().getIndexReader().leaves()) {
AtomicReader indexReader = atomicReaderContext.reader();
int parentDocId = context.idCache().reader(indexReader).docById(parentType, parentId);
if (parentDocId != -1 && !indexReader.isDeleted(parentDocId)) {
if (parentDocId != -1 && !indexReader.getLiveDocs().get(parentDocId)) {
// we found a match, add it and break
TIntObjectHashMap<ParentDoc> readerParentDocs = parentDocsPerReader.get(indexReader.getCoreCacheKey());
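
The readerIndex/docStarts bookkeeping from Lucene 3 maps onto ReaderUtil.subIndex plus the leaf's docBase, as used above. As a standalone sketch (LeafDocResolver is an illustrative name), converting a top-level doc id into a segment-relative one looks like this:

import java.util.List;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.ScoreDoc;

// Sketch: map a top-level doc id (e.g. from TopDocs) to its leaf and the
// segment-relative doc id, replacing searcher.readerIndex()/docStarts().
final class LeafDocResolver {

    static int resolve(IndexReader topReader, ScoreDoc scoreDoc) {
        List<AtomicReaderContext> leaves = topReader.leaves();
        int leafIndex = ReaderUtil.subIndex(scoreDoc.doc, leaves);
        AtomicReaderContext leaf = leaves.get(leafIndex);
        return scoreDoc.doc - leaf.docBase; // doc id within that segment
    }
}
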
@@ -205,15 +206,15 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
}
@Override
public Weight createWeight(Searcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher) throws IOException {
if (!properlyInvoked) {
throw new ElasticSearchIllegalStateException("top_children query hasn't executed properly");
}
if (parentDocs != null) {
return new ParentWeight(searcher, query.weight(searcher));
return new ParentWeight(searcher, query.createWeight(searcher));
}
return query.weight(searcher);
return query.createWeight(searcher);
}
public String toString(String field) {
@@ -225,11 +226,11 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
class ParentWeight extends Weight {
final Searcher searcher;
final IndexSearcher searcher;
final Weight queryWeight;
public ParentWeight(Searcher searcher, Weight queryWeight) throws IOException {
public ParentWeight(IndexSearcher searcher, Weight queryWeight) throws IOException {
this.searcher = searcher;
this.queryWeight = queryWeight;
}
@@ -243,28 +244,28 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
}
@Override
public float sumOfSquaredWeights() throws IOException {
float sum = queryWeight.sumOfSquaredWeights();
public float getValueForNormalization() throws IOException {
float sum = queryWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm) {
// nothing to do here....
public void normalize(float norm, float topLevelBoost) {
// Nothing to normalize
}
@Override
public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder, boolean topScorer) throws IOException {
ParentDoc[] readerParentDocs = parentDocs.get(reader.getCoreCacheKey());
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
ParentDoc[] readerParentDocs = parentDocs.get(context.reader().getCoreCacheKey());
if (readerParentDocs != null) {
return new ParentScorer(getSimilarity(searcher), readerParentDocs);
return new ParentScorer(this, readerParentDocs);
}
return new EmptyScorer(getSimilarity(searcher));
return new EmptyScorer(this);
}
@Override
public Explanation explain(IndexReader reader, int doc) throws IOException {
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
return new Explanation(getBoost(), "not implemented yet...");
}
}
@@ -275,8 +276,8 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
private int index = -1;
private ParentScorer(Similarity similarity, ParentDoc[] docs) throws IOException {
super(similarity);
private ParentScorer(ParentWeight weight, ParentDoc[] docs) throws IOException {
super(weight);
this.docs = docs;
}
@@ -315,5 +316,10 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
}
throw new ElasticSearchIllegalStateException("No support for score type [" + scoreType + "]");
}
@Override
public float freq() throws IOException {
return docs[index].count; // The number of matches in the child doc, which is propagated to parent
}
}
}
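
Taken together, the Weight-side changes in this file follow the general Lucene 4 shape: sumOfSquaredWeights() becomes getValueForNormalization(), normalize() gains a topLevelBoost argument, and scorer()/explain() take an AtomicReaderContext plus acceptDocs. The class below is a bare-bones sketch of that surface; SketchWeight is an illustrative name, unrelated to the parent/child logic above.

import java.io.IOException;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;

// Generic sketch of the Lucene 4 Weight surface used by this commit:
// getValueForNormalization()/normalize(norm, topLevelBoost) replace
// sumOfSquaredWeights()/normalize(norm), and scorer() receives the
// per-segment context plus acceptDocs.
class SketchWeight extends Weight {

    private final Query query; // the query this weight was created for
    private float queryWeight;

    SketchWeight(Query query) {
        this.query = query;
    }

    @Override
    public Query getQuery() {
        return query;
    }

    @Override
    public float getValueForNormalization() throws IOException {
        queryWeight = query.getBoost();
        return queryWeight * queryWeight;
    }

    @Override
    public void normalize(float norm, float topLevelBoost) {
        queryWeight *= norm * topLevelBoost;
    }

    @Override
    public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
                         boolean topScorer, Bits acceptDocs) throws IOException {
        // A real implementation builds its per-segment scorer here, honouring
        // acceptDocs; returning null means "no matches in this segment".
        return null;
    }

    @Override
    public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
        return new Explanation(queryWeight, "sketch weight, no real explanation");
    }
}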