Composite aggregation must check live docs when the index is sorted (#63864)

This change ensures that the live docs are checked in the composite aggregator
when the index is sorted.
This commit is contained in:
Jim Ferenczi 2020-10-20 11:39:25 +02:00 committed by jimczi
parent 1880bcdc09
commit 3423f214dd
2 changed files with 21 additions and 4 deletions

View File

@@ -42,6 +42,7 @@ import org.apache.lucene.search.SortedNumericSelector;
 import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.comparators.LongComparator;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.RoaringDocIdSet;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.index.IndexSortConfig;
@@ -367,8 +368,11 @@ final class CompositeAggregator extends BucketsAggregator {
         final LeafBucketCollector inner = queue.getLeafCollector(ctx,
             getFirstPassCollector(docIdSetBuilder, indexSortPrefix.getSort().length));
         inner.setScorer(scorer);
+        final Bits liveDocs = ctx.reader().getLiveDocs();
         while (docIt.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-            inner.collect(docIt.docID());
+            if (liveDocs == null || liveDocs.get(docIt.docID())) {
+                inner.collect(docIt.docID());
+            }
         }
     }
 }

View File

@@ -2266,14 +2266,26 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         }
         try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) {
             Document document = new Document();
+            int id = 0;
             for (Map<String, List<Object>> fields : dataset) {
-                addToDocument(document, fields);
-                indexWriter.addDocument(document);
                 document.clear();
+                addToDocument(id, document, fields);
+                indexWriter.addDocument(document);
+                id++;
             }
             if (rarely()) {
                 indexWriter.forceMerge(1);
             }
+            if (dataset.size() > 0) {
+                int numDeletes = randomIntBetween(1, 25);
+                for (int i = 0; i < numDeletes; i++) {
+                    id = randomIntBetween(0, dataset.size() - 1);
+                    indexWriter.deleteDocuments(new Term("id", Integer.toString(id)));
+                    document.clear();
+                    addToDocument(id, document, dataset.get(id));
+                    indexWriter.addDocument(document);
+                }
+            }
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             IndexSearcher indexSearcher = new IndexSearcher(indexReader);
try (IndexReader indexReader = DirectoryReader.open(directory)) { try (IndexReader indexReader = DirectoryReader.open(directory)) {
IndexSearcher indexSearcher = new IndexSearcher(indexReader); IndexSearcher indexSearcher = new IndexSearcher(indexReader);
@@ -2298,7 +2310,8 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         return IndexSettingsModule.newIndexSettings(new Index("_index", "0"), builder.build());
     }

-    private void addToDocument(Document doc, Map<String, List<Object>> keys) {
+    private void addToDocument(int id, Document doc, Map<String, List<Object>> keys) {
+        doc.add(new StringField("id", Integer.toString(id), Field.Store.NO));
         for (Map.Entry<String, List<Object>> entry : keys.entrySet()) {
             final String name = entry.getKey();
             for (Object value : entry.getValue()) {