Tests: fix GeoHashGridAggregatorTests expectations (#23556)
Currently GeoHashGridAggregatorTests#testWithSeveralDocs increases the expected document count per hash for each geo point added to a document. When several points added to the same document fall into the same bucket (the same geohash cell), that document should only be counted once. Closes #23555
This commit is contained in:
parent 21dcd4f4ca
commit a8117a2d77
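
For context, here is a minimal, self-contained sketch of the corrected expectation logic outside the test harness. This is not code from the commit: the class name PerDocBucketCount and the sample hash strings are invented for illustration, while expectedCountPerGeoHash and distinctHashesPerDoc mirror the names used in the diff below.

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class PerDocBucketCount {
    public static void main(String[] args) {
        // One document with three points; two of them hash into the same cell.
        // The hash strings are made up for this example.
        List<String> hashesOfOneDoc = List.of("u09t", "u09t", "gbsu");
        Map<String, Integer> expectedCountPerGeoHash = new HashMap<>();
        Set<String> distinctHashesPerDoc = new HashSet<>();
        for (String hash : hashesOfOneDoc) {
            // Count a bucket at most once per document.
            if (distinctHashesPerDoc.contains(hash) == false) {
                expectedCountPerGeoHash.put(hash, expectedCountPerGeoHash.getOrDefault(hash, 0) + 1);
            }
            distinctHashesPerDoc.add(hash);
        }
        // Each bucket counts the document once, e.g. {gbsu=1, u09t=1}
        // (HashMap iteration order may vary).
        System.out.println(expectedCountPerGeoHash);
    }
}

When a document is flushed to the index, the per-document set must be cleared so the next document starts fresh, which is exactly the distinctHashesPerDoc.clear() call the diff adds inside the usually() block.
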
@@ -36,8 +36,10 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.function.Consumer;
 
 import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
@@ -68,15 +70,20 @@ public class GeoHashGridAggregatorTests extends AggregatorTestCase {
         Map<String, Integer> expectedCountPerGeoHash = new HashMap<>();
         testCase(new MatchAllDocsQuery(), FIELD_NAME, precision, iw -> {
             List<LatLonDocValuesField> points = new ArrayList<>();
+            Set<String> distinctHashesPerDoc = new HashSet<>();
             for (int pointId = 0; pointId < numPoints; pointId++) {
                 double lat = (180d * randomDouble()) - 90d;
                 double lng = (360d * randomDouble()) - 180d;
                 points.add(new LatLonDocValuesField(FIELD_NAME, lat, lng));
                 String hash = stringEncode(lng, lat, precision);
-                expectedCountPerGeoHash.put(hash, expectedCountPerGeoHash.getOrDefault(hash, 0) + 1);
+                if (distinctHashesPerDoc.contains(hash) == false) {
+                    expectedCountPerGeoHash.put(hash, expectedCountPerGeoHash.getOrDefault(hash, 0) + 1);
+                }
+                distinctHashesPerDoc.add(hash);
                 if (usually()) {
                     iw.addDocument(points);
                     points.clear();
+                    distinctHashesPerDoc.clear();
                 }
             }
             if (points.size() != 0) {