Fix sum test
The test was relying on the compensated sum working, but the test framework was dodging it. This change forces the accuracy tests to run against a single shard, where we get the proper compensated sum. Closes #56757
parent da833d6cd3
commit 7b626826eb
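The "compensated sum" here is Kahan summation: the aggregator keeps a second double that captures the rounding error of each addition and feeds it back into the next one. A minimal sketch of the idea in Java (an illustration of the technique, not the aggregator's actual implementation):

    static double kahanSum(double[] values) {
        double sum = 0.0;
        double compensation = 0.0; // low-order bits lost by the previous addition
        for (double value : values) {
            double corrected = value - compensation;  // re-apply the lost bits
            double total = sum + corrected;           // may round away low-order bits again
            compensation = (total - sum) - corrected; // measure exactly what was just lost
            sum = total;
        }
        return sum;
    }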
@@ -160,11 +160,10 @@ public class SumAggregatorTests extends AggregatorTestCase {
             "Re-index with correct docvalues type.", e.getMessage());
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/56757")
     public void testSummationAccuracy() throws IOException {
         // Summing up a normal array and expect an accurate value
         double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7};
-        verifySummationOfDoubles(values, 15.3, 0d);
+        verifySummationOfDoubles(values, 15.3, Double.MIN_NORMAL);
 
         // Summing up an array which contains NaN and infinities and expect a result same as naive summation
         int n = randomIntBetween(5, 10);
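The old 0d delta demanded an exactly representable result, which the naive per-segment reduction could not deliver; Double.MIN_NORMAL (about 2.2e-308) still leaves essentially zero room for error at this magnitude. The drift a plain left-to-right sum picks up on this very array is easy to observe; a quick check, with the outcome hedged in the comments since the exact bits depend on accumulation order:

    double[] values = {0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7};
    double naive = 0;
    for (double v : values) {
        naive += v; // each addition can round, and the error accumulates
    }
    // A naive sum typically lands a few ulps away from 15.3; the compensated
    // sum is what lets the test assert the exact expected value.
    System.out.println(naive);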
@@ -197,9 +196,21 @@ public class SumAggregatorTests extends AggregatorTestCase {
             sum("_name").field(FIELD_NAME),
             new MatchAllDocsQuery(),
             iw -> {
-                for (double value : values) {
-                    iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value))));
-                }
+                /*
+                 * The sum agg uses a Kahan summation on the shard to limit
+                 * floating point errors. But it doesn't ship the compensations
+                 * to the coordinating node, so floating point error can creep
+                 * in when reducing many sums. The test framework aggregates
+                 * each segment as though it were a separate shard, then
+                 * reduces those together. Fun. But it means we don't get the
+                 * full accuracy of the Kahan summation. And *that* accuracy is
+                 * what this method is trying to test. So we have to stick
+                 * all the documents on the same leaf. `addDocuments` does
+                 * that.
+                 */
+                iw.addDocuments(Arrays.stream(values).mapToObj(value ->
+                    singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value)))
+                ).collect(toList()));
             },
             result -> assertEquals(expected, result.getValue(), delta),
             defaultFieldType(NumberType.DOUBLE)
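The situation that comment describes is easy to reproduce outside the test framework: give each "shard" a partial sum that is individually exact, reduce the partials naively, and compare with one compensated pass over all the values. A self-contained sketch (kahanSum is the same hypothetical helper as above, repeated so the class compiles):

    import java.util.Arrays;

    public class CompensationLossDemo {
        public static void main(String[] args) {
            // Ten "shards", each holding one document with value 0.1.
            // Every per-shard sum is exact: it is simply 0.1.
            double reduced = 0;
            for (int shard = 0; shard < 10; shard++) {
                reduced += 0.1; // naive cross-shard reduce rounds on every add
            }

            // One shard holding all ten documents, summed with compensation.
            double[] all = new double[10];
            Arrays.fill(all, 0.1);
            double singleShard = kahanSum(all);

            // Typically prints 0.9999999999999999 vs 1.0: the naive reduce
            // drifted even though every per-shard partial sum was exact.
            System.out.println(reduced + " vs " + singleShard);
        }

        static double kahanSum(double[] values) {
            double sum = 0.0, compensation = 0.0;
            for (double value : values) {
                double corrected = value - compensation;
                double total = sum + corrected;
                compensation = (total - sum) - corrected;
                sum = total;
            }
            return sum;
        }
    }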