Aggregations: Fix geohash grid doc counts computation on multi-valued fields.

Close #8512
This commit is contained in:
Adrien Grand 2014-11-20 15:35:31 +01:00
parent 98641ff39f
commit abc0bc4c7f
2 changed files with 11 additions and 9 deletions

View File

@@ -74,14 +74,18 @@ public class GeoHashGridAggregator extends BucketsAggregator {
values.setDocument(doc);
final int valuesCount = values.count();
long previous = Long.MAX_VALUE;
for (int i = 0; i < valuesCount; ++i) {
final long val = values.valueAt(i);
long bucketOrdinal = bucketOrds.add(val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = - 1 - bucketOrdinal;
collectExistingBucket(doc, bucketOrdinal);
} else {
collectBucket(doc, bucketOrdinal);
if (previous != val || i == 0) {
long bucketOrdinal = bucketOrds.add(val);
if (bucketOrdinal < 0) { // already seen
bucketOrdinal = - 1 - bucketOrdinal;
collectExistingBucket(doc, bucketOrdinal);
} else {
collectBucket(doc, bucketOrdinal);
}
previous = val;
}
}
}

View File

@@ -106,9 +106,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numDocs; i++) {
final int numPoints = random.nextInt(4);
List<String> points = new ArrayList<>();
// TODO (#8512): this should be a Set, not a List. Currently if a document has two positions that have
// the same geo hash, it will increase the doc_count for this geo hash by 2 instead of 1
List<String> geoHashes = new ArrayList<>();
Set<String> geoHashes = new HashSet<>();
for (int j = 0; j < numPoints; ++j) {
double lat = (180d * random.nextDouble()) - 90d;
double lng = (360d * random.nextDouble()) - 180d;