Aggregations: Do not take deleted documents into account in aggregation filters.

Since aggregators are only called on documents that match the query, they are
never called on deleted documents, so by specifying `null` as live docs we very
likely remove a BitsFilteredDocIdSet layer.

Close #8540
Adrien Grand 2014-11-18 22:30:16 +01:00
parent cca5934e9d
commit f30a0e846d
4 changed files with 22 additions and 8 deletions
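
For context, the reasoning above hinges on how Lucene filters apply accept docs: when live docs are passed to Filter#getDocIdSet, implementations typically return their matches wrapped in a BitsFilteredDocIdSet-style layer that re-checks every doc, whereas passing null lets them return the raw set. The sketch below is illustrative only (it mimics that wrapper using Lucene's FilteredDocIdSet base class; it is not the actual BitsFilteredDocIdSet code):

import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.FilteredDocIdSet;
import org.apache.lucene.util.Bits;

// Hedged sketch of the layer this commit avoids: a wrapper that re-checks each
// matching doc against the accept docs (usually the live docs). Passing null
// accept docs makes this wrapper unnecessary, so filters can skip it entirely.
final class AcceptDocsFilteredDocIdSet extends FilteredDocIdSet {
    private final Bits acceptDocs;

    AcceptDocsFilteredDocIdSet(DocIdSet matches, Bits acceptDocs) {
        super(matches);
        this.acceptDocs = acceptDocs;
    }

    @Override
    protected boolean match(int docid) {
        return acceptDocs.get(docid); // the per-document check that null avoids
    }
}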


@@ -51,7 +51,7 @@ public class FilterAggregator extends SingleBucketAggregator {
     @Override
     public void setNextReader(LeafReaderContext reader) {
         try {
-            bits = DocIdSets.toSafeBits(reader.reader(), filter.getDocIdSet(reader, reader.reader().getLiveDocs()));
+            bits = DocIdSets.toSafeBits(reader.reader(), filter.getDocIdSet(reader, null));
         } catch (IOException ioe) {
             throw new AggregationExecutionException("Failed to aggregate filter aggregator [" + name + "]", ioe);
         }
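
Passing null is safe here because of how the resulting bits are consumed: the aggregator only asks bits.get(doc) for documents the query already collected, and deleted documents never reach collection. A rough sketch of what a toSafeBits-style conversion needs to provide, assuming the Lucene 5.x LeafReader naming used in this diff (the actual DocIdSets utility in Elasticsearch may differ):

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

// Hedged sketch: turn a (possibly null) DocIdSet into random-access Bits that
// span reader.maxDoc(), so the aggregator can test membership per collected doc.
static Bits toSafeBitsSketch(LeafReader reader, DocIdSet set) throws IOException {
    if (set == null) {
        return new Bits.MatchNoBits(reader.maxDoc()); // filter matched nothing
    }
    Bits bits = set.bits();
    if (bits != null) {
        return bits; // the set already supports random access
    }
    // otherwise materialize the iterator into a fixed bit set
    FixedBitSet fixed = new FixedBitSet(reader.maxDoc());
    DocIdSetIterator it = set.iterator();
    if (it != null) {
        fixed.or(it);
    }
    return fixed;
}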


@@ -70,7 +70,7 @@ public class FiltersAggregator extends BucketsAggregator {
     public void setNextReader(LeafReaderContext reader) {
         try {
             for (int i = 0; i < filters.length; i++) {
-                bits[i] = DocIdSets.toSafeBits(reader.reader(), filters[i].filter.getDocIdSet(reader, reader.reader().getLiveDocs()));
+                bits[i] = DocIdSets.toSafeBits(reader.reader(), filters[i].filter.getDocIdSet(reader, null));
             }
         } catch (IOException ioe) {
             throw new AggregationExecutionException("Failed to aggregate filter aggregator [" + name + "]", ioe);


@@ -64,12 +64,17 @@ public class FilterTests extends ElasticsearchIntegrationTest {
                     .endObject()));
         }
         for (int i = numTag1Docs; i < numDocs; i++) {
-            builders.add(client().prepareIndex("idx", "type", ""+i).setSource(jsonBuilder()
+            IndexRequestBuilder req = client().prepareIndex("idx", "type", ""+i).setSource(jsonBuilder()
                     .startObject()
                     .field("value", i)
                     .field("tag", "tag2")
                     .field("name", "name" + i)
-                    .endObject()));
+                    .endObject());
+            builders.add(req);
+            if (randomBoolean()) {
+                // randomly index the document twice so that we have deleted docs that match the filter
+                builders.add(req);
+            }
         }
         prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").execute().actionGet();
         for (int i = 0; i < 2; i++) {
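
The test trick above relies on how updates behave at the Lucene level: re-indexing an existing id deletes the previous copy and appends a new one, so until a merge runs, the segment contains deleted docs that still match the tag filter. A standalone sketch of that effect, assuming Lucene 5.x-style APIs (illustrative only, not Elasticsearch test code):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

// Illustrative only: shows that re-indexing the same id leaves a deleted doc behind.
public class DeletedDocsDemo {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new RAMDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new StringField("_id", "1", Field.Store.NO));
            doc.add(new StringField("tag", "tag2", Field.Store.NO));
            writer.addDocument(doc);
            // The update deletes the old copy and adds a new one; the old copy stays in
            // the segment as a deleted doc (still matching tag:tag2) until a merge.
            writer.updateDocument(new Term("_id", "1"), doc);
            writer.commit();
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                // expected: maxDoc=2, deleted=1
                System.out.println("maxDoc=" + reader.maxDoc() + ", deleted=" + reader.numDeletedDocs());
            }
        }
    }
}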


@@ -60,20 +60,29 @@ public class FiltersTests extends ElasticsearchIntegrationTest {
         numTag1Docs = randomIntBetween(1, numDocs - 1);
         List<IndexRequestBuilder> builders = new ArrayList<>();
         for (int i = 0; i < numTag1Docs; i++) {
-            builders.add(client().prepareIndex("idx", "type", ""+i).setSource(jsonBuilder()
+            IndexRequestBuilder req = client().prepareIndex("idx", "type", ""+i).setSource(jsonBuilder()
                     .startObject()
                     .field("value", i + 1)
                     .field("tag", "tag1")
-                    .endObject()));
+                    .endObject());
+            builders.add(req);
+            if (randomBoolean()) {
+                // randomly index the document twice so that we have deleted docs that match the filter
+                builders.add(req);
+            }
         }
         for (int i = numTag1Docs; i < numDocs; i++) {
             numTag2Docs++;
-            builders.add(client().prepareIndex("idx", "type", ""+i).setSource(jsonBuilder()
+            IndexRequestBuilder req = client().prepareIndex("idx", "type", ""+i).setSource(jsonBuilder()
                     .startObject()
                     .field("value", i)
                     .field("tag", "tag2")
                     .field("name", "name" + i)
-                    .endObject()));
+                    .endObject());
+            builders.add(req);
+            if (randomBoolean()) {
+                builders.add(req);
+            }
         }
         prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").execute().actionGet();
         for (int i = 0; i < 2; i++) {