Composite aggregation must check live docs when the index is sorted (#63864)
This change ensures that the live docs are checked in the composite aggregator when the index is sorted, so that documents marked as deleted are no longer collected on the index-sort fast path.
commit 3423f214dd (parent 1880bcdc09)
```diff
@@ -42,6 +42,7 @@ import org.apache.lucene.search.SortedNumericSelector;
 import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.comparators.LongComparator;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.RoaringDocIdSet;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.index.IndexSortConfig;
@@ -367,8 +368,11 @@ final class CompositeAggregator extends BucketsAggregator {
             final LeafBucketCollector inner = queue.getLeafCollector(ctx,
                 getFirstPassCollector(docIdSetBuilder, indexSortPrefix.getSort().length));
             inner.setScorer(scorer);
+            final Bits liveDocs = ctx.reader().getLiveDocs();
             while (docIt.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-                inner.collect(docIt.docID());
+                if (liveDocs == null || liveDocs.get(docIt.docID())) {
+                    inner.collect(docIt.docID());
+                }
             }
         }
     }
```
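For readers less familiar with Lucene's deletion model: deleting a document does not remove it from its segment immediately, it only marks it as deleted, and `LeafReader#getLiveDocs()` exposes the surviving documents as a `Bits` set (`null` when the segment has no deletions). The fast path patched above drives the scorer's `DocIdSetIterator` itself rather than going through `IndexSearcher`'s bulk scoring, which is where Lucene would normally apply that filter, hence the explicit check. The standalone sketch below is not taken from the Elasticsearch code base; it assumes Lucene 8.x on the classpath and uses illustrative names to show the same pattern against a plain index:

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;

public class LiveDocsCheckExample {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory()) {
            try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                for (int i = 0; i < 5; i++) {
                    Document doc = new Document();
                    doc.add(new StringField("id", Integer.toString(i), Field.Store.NO));
                    writer.addDocument(doc);
                }
                // Mark one document as deleted; it stays in the segment until a merge rewrites it.
                writer.deleteDocuments(new Term("id", "2"));
            }
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                for (LeafReaderContext ctx : reader.leaves()) {
                    // null means this segment has no deletions at all.
                    Bits liveDocs = ctx.reader().getLiveDocs();
                    for (int docId = 0; docId < ctx.reader().maxDoc(); docId++) {
                        if (liveDocs == null || liveDocs.get(docId)) {
                            // Only live documents are "collected", mirroring the check
                            // added to the composite aggregator above.
                            System.out.println("collect doc " + (ctx.docBase + docId));
                        }
                    }
                }
            }
        }
    }
}
```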
```diff
@@ -2266,14 +2266,26 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         }
         try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) {
             Document document = new Document();
+            int id = 0;
             for (Map<String, List<Object>> fields : dataset) {
-                addToDocument(document, fields);
-                indexWriter.addDocument(document);
                 document.clear();
+                addToDocument(id, document, fields);
+                indexWriter.addDocument(document);
+                id++;
             }
             if (rarely()) {
                 indexWriter.forceMerge(1);
             }
+            if (dataset.size() > 0) {
+                int numDeletes = randomIntBetween(1, 25);
+                for (int i = 0; i < numDeletes; i++) {
+                    id = randomIntBetween(0, dataset.size() - 1);
+                    indexWriter.deleteDocuments(new Term("id", Integer.toString(id)));
+                    document.clear();
+                    addToDocument(id, document, dataset.get(id));
+                    indexWriter.addDocument(document);
+                }
+            }
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             IndexSearcher indexSearcher = new IndexSearcher(indexReader);
```
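The test change above makes deletions part of the randomized setup: after indexing, it repeatedly (between 1 and 25 times) picks a random document, deletes it by its new `id` term, and re-adds an identical copy, so the logical dataset, and with it the expected aggregation results, stays the same while the segments now contain tombstones the aggregator must skip. A minimal sketch of that delete-and-re-add pattern using plain Lucene classes (names and values are illustrative, not taken from the test):

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class DeleteAndReaddExample {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            // Index three documents, each tagged with a unique, untokenized "id" term.
            for (int id = 0; id < 3; id++) {
                Document doc = new Document();
                doc.add(new StringField("id", Integer.toString(id), Field.Store.NO));
                writer.addDocument(doc);
            }
            writer.commit();

            // Delete one document by term and re-add an equivalent one, as the test does:
            // the logical content is unchanged, but the index now carries a tombstone.
            writer.deleteDocuments(new Term("id", "1"));
            Document replacement = new Document();
            replacement.add(new StringField("id", "1", Field.Store.NO));
            writer.addDocument(replacement);
            writer.commit();

            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                // numDocs() counts only live documents; maxDoc() also counts the tombstone.
                System.out.println("live docs: " + reader.numDocs()); // 3
                System.out.println("max doc:   " + reader.maxDoc());  // normally 4 until a merge removes the tombstone
            }
        }
    }
}
```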
```diff
@@ -2298,7 +2310,8 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         return IndexSettingsModule.newIndexSettings(new Index("_index", "0"), builder.build());
     }
 
-    private void addToDocument(Document doc, Map<String, List<Object>> keys) {
+    private void addToDocument(int id, Document doc, Map<String, List<Object>> keys) {
+        doc.add(new StringField("id", Integer.toString(id), Field.Store.NO));
         for (Map.Entry<String, List<Object>> entry : keys.entrySet()) {
             final String name = entry.getKey();
             for (Object value : entry.getValue()) {
```
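One remark on the helper change above (this reading is inferred from the diff, not stated in the commit): making the `id` a `StringField` indexes each document under a single exact term that `deleteDocuments(new Term("id", ...))` can target, and `Field.Store.NO` suffices because the value is never read back. The explicit delete followed by re-adding the same content is essentially what `IndexWriter#updateDocument` does in a single call; a small sketch of that equivalent (the class and method names here are illustrative):

```java
import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;

final class UpdateById {
    // Atomically deletes any document carrying this exact "id" term and indexes the
    // replacement, leaving a tombstone behind just like an explicit delete + add.
    static void replace(IndexWriter writer, int id, Document replacement) throws IOException {
        replacement.add(new StringField("id", Integer.toString(id), Field.Store.NO));
        writer.updateDocument(new Term("id", Integer.toString(id)), replacement);
    }
}
```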