This deprecates `Rounding#round` and `Rounding#nextRoundingValue` in favor of calling

```
Rounding.Prepared prepared = rounding.prepare(min, max);
...
prepared.round(val)
```

because it is always going to be faster to prepare once. There are going to be some cases where we won't know what to prepare *for*; in those cases you can call `prepareForUnknown` and still be faster than calling the deprecated methods over and over again. Ultimately, this is important because there doesn't appear to be an easy way to cache `Rounding.Prepared` or any of its precursors, like `LocalTimeOffset.Lookup`, so instead we build it at most once per request.

Relates to #56124
parent 9bcb41a39e
commit 44a79d1739
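For readers skimming the diff, here is a minimal sketch of the call pattern this commit pushes callers toward. The `prepare`, `prepareForUnknown`, `round`, and `nextRoundingValue` calls are the ones that appear in the hunks below; the class name, variable names, unit choice, and example epoch-millisecond values are illustrative assumptions, not code from this commit.

```java
import org.elasticsearch.common.Rounding;

import java.time.ZoneOffset;

// Illustrative sketch only (not part of this commit): migrating from the
// deprecated per-call rounding to preparing a Rounding once and reusing it.
public class PreparedRoundingSketch {
    public static void main(String[] args) {
        Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH)
            .timeZone(ZoneOffset.UTC)
            .build();

        long min = 1588291200000L; // 2020-05-01T00:00:00Z, example value
        long max = 1590969600000L; // 2020-06-01T00:00:00Z, example value

        // Deprecated by this commit: each call prepares internally before rounding.
        long viaDeprecated = rounding.round(min);

        // Preferred: prepare once for the [min, max] range, then round many values.
        Rounding.Prepared prepared = rounding.prepare(min, max);
        long rounded = prepared.round(min);
        long next = prepared.nextRoundingValue(rounded);

        // When the range is not known up front, prepareForUnknown() is still
        // cheaper than calling the deprecated methods repeatedly.
        Rounding.Prepared forUnknown = rounding.prepareForUnknown();
        long alsoRounded = forUnknown.round(min);

        // Both paths produce the same rounded value; only the cost differs.
        System.out.println(viaDeprecated == rounded && rounded == alsoRounded);
    }
}
```

The same shift is visible throughout the hunks below: call sites that used to hold a `Rounding` now hold a `Rounding.Prepared`, built either with `prepare(min, max)` when the range is known or with `prepareForUnknown()` when it is not.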
@@ -240,9 +240,9 @@ public abstract class Rounding implements Writeable {
     /**
      * Rounds the given value.
      * <p>
      * Prefer {@link #prepare(long, long)} if rounding many values.
+     * @deprecated Prefer {@link #prepare} and then {@link Prepared#round(long)}
      */
+    @Deprecated
     public final long round(long utcMillis) {
         return prepare(utcMillis, utcMillis).round(utcMillis);
     }
@@ -252,9 +252,9 @@ public abstract class Rounding implements Writeable {
      * {@link #round(long)}, returns the next rounding value. For
      * example, with interval based rounding, if the interval is
      * 3, {@code nextRoundValue(6) = 9}.
      * <p>
      * Prefer {@link #prepare(long, long)} if rounding many values.
+     * @deprecated Prefer {@link #prepare} and then {@link Prepared#nextRoundingValue(long)}
      */
+    @Deprecated
     public final long nextRoundingValue(long utcMillis) {
         return prepare(utcMillis, utcMillis).nextRoundingValue(utcMillis);
     }
@@ -60,16 +60,16 @@ public class RoundingDuelTests extends ESTestCase {
     public void testDuellingImplementations() {
         org.elasticsearch.common.Rounding.DateTimeUnit randomDateTimeUnit =
                 randomFrom(org.elasticsearch.common.Rounding.DateTimeUnit.values());
-        org.elasticsearch.common.Rounding rounding;
+        org.elasticsearch.common.Rounding.Prepared rounding;
         Rounding roundingJoda;

         if (randomBoolean()) {
-            rounding = org.elasticsearch.common.Rounding.builder(randomDateTimeUnit).timeZone(ZoneOffset.UTC).build();
+            rounding = org.elasticsearch.common.Rounding.builder(randomDateTimeUnit).timeZone(ZoneOffset.UTC).build().prepareForUnknown();
             DateTimeUnit dateTimeUnit = DateTimeUnit.resolve(randomDateTimeUnit.getId());
             roundingJoda = Rounding.builder(dateTimeUnit).timeZone(DateTimeZone.UTC).build();
         } else {
             TimeValue interval = timeValue();
-            rounding = org.elasticsearch.common.Rounding.builder(interval).timeZone(ZoneOffset.UTC).build();
+            rounding = org.elasticsearch.common.Rounding.builder(interval).timeZone(ZoneOffset.UTC).build().prepareForUnknown();
             roundingJoda = Rounding.builder(interval).timeZone(DateTimeZone.UTC).build();
         }

@@ -157,6 +157,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati

         int roundingIndex = reduced.getBucketInfo().roundingIdx;
         RoundingInfo roundingInfo = AutoDateHistogramAggregationBuilder.buildRoundings(null, null)[roundingIndex];
+        Rounding.Prepared prepared = roundingInfo.rounding.prepare(lowest, highest);

         long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
         int innerIntervalIndex = 0;
@@ -185,7 +186,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
         } else {
             do {
                 innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex];
-                int bucketCountAtInterval = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse);
+                int bucketCountAtInterval = getBucketCount(lowest, highest, prepared, innerIntervalToUse);
                 if (bucketCountAtInterval == reduced.getBuckets().size()) {
                     break;
                 }
@@ -199,11 +200,11 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
         assertThat(reduced.getInterval().toString(), equalTo(innerIntervalToUse + roundingInfo.unitAbbreviation));
         Map<Instant, Long> expectedCounts = new TreeMap<>();
         if (totalBucketConut > 0) {
-            long keyForBucket = roundingInfo.rounding.round(lowest);
-            while (keyForBucket <= roundingInfo.rounding.round(highest)) {
+            long keyForBucket = prepared.round(lowest);
+            while (keyForBucket <= prepared.round(highest)) {
                 long nextKey = keyForBucket;
                 for (int i = 0; i < innerIntervalToUse; i++) {
-                    nextKey = roundingInfo.rounding.nextRoundingValue(nextKey);
+                    nextKey = prepared.nextRoundingValue(nextKey);
                 }
                 Instant key = Instant.ofEpochMilli(keyForBucket);
                 expectedCounts.put(key, 0L);
@@ -214,7 +215,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati

                 for (InternalAutoDateHistogram histogram : inputs) {
                     for (Histogram.Bucket bucket : histogram.getBuckets()) {
-                        long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
+                        long roundedBucketKey = prepared.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
                         long docCount = bucket.getDocCount();
                         if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) {
                             expectedCounts.compute(key,
@@ -227,8 +228,8 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati

             // If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
             // this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
-            if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
-                expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L);
+            if (prepared.round(lowest) == prepared.round(highest) && expectedCounts.isEmpty()) {
+                expectedCounts.put(Instant.ofEpochMilli(prepared.round(lowest)), 0L);
             }
         }

@@ -249,12 +250,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
         assertThat(reduced.getInterval(), equalTo(expectedInterval));
     }

-    private int getBucketCount(long min, long max, Rounding rounding, int interval) {
+    private int getBucketCount(long min, long max, Rounding.Prepared prepared, int interval) {
         int bucketCount = 0;
-        long key = rounding.round(min);
+        long key = prepared.round(min);
         while (key < max) {
             for (int i = 0; i < interval; i++) {
-                key = rounding.nextRoundingValue(key);
+                key = prepared.nextRoundingValue(key);
             }
             bucketCount++;
         }
@@ -58,7 +58,8 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe
             long interval = randomIntBetween(1, 3);
             intervalMillis = randomFrom(timeValueSeconds(interval), timeValueMinutes(interval), timeValueHours(interval)).getMillis();
             Rounding rounding = Rounding.builder(TimeValue.timeValueMillis(intervalMillis)).build();
-            baseMillis = rounding.round(System.currentTimeMillis());
+            long now = System.currentTimeMillis();
+            baseMillis = rounding.prepare(now, now).round(now);
             if (randomBoolean()) {
                 minDocCount = randomIntBetween(1, 10);
                 emptyBucketInfo = null;
@@ -108,8 +109,12 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe
         long minBound = -1;
         long maxBound = -1;
         if (emptyBucketInfo.bounds != null) {
-            minBound = emptyBucketInfo.rounding.round(emptyBucketInfo.bounds.getMin());
-            maxBound = emptyBucketInfo.rounding.round(emptyBucketInfo.bounds.getMax());
+            Rounding.Prepared prepared = emptyBucketInfo.rounding.prepare(
+                emptyBucketInfo.bounds.getMin(),
+                emptyBucketInfo.bounds.getMax()
+            );
+            minBound = prepared.round(emptyBucketInfo.bounds.getMin());
+            maxBound = prepared.round(emptyBucketInfo.bounds.getMax());
             if (expectedCounts.isEmpty() && minBound <= maxBound) {
                 expectedCounts.put(minBound, 0L);
             }
@@ -304,7 +304,7 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject {
     /**
      * Create the rounding for this date histogram
      */
-    public Rounding createRounding() {
+    public Rounding.Prepared createRounding() {
         return createRounding(interval.toString(), timeZone);
     }

@@ -363,7 +363,7 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject {
         return PARSER.parse(parser, null);
     }

-    private static Rounding createRounding(final String expr, final String timeZone) {
+    private static Rounding.Prepared createRounding(final String expr, final String timeZone) {
         Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(expr);
         final Rounding.Builder rounding;
         if (timeUnit != null) {
@@ -372,6 +372,6 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject {
             rounding = new Rounding.Builder(TimeValue.parseTimeValue(expr, "createRounding"));
         }
         rounding.timeZone(ZoneId.of(timeZone, ZoneId.SHORT_IDS));
-        return rounding.build();
+        return rounding.build().prepareForUnknown();
     }
 }
@@ -209,13 +209,27 @@ public class DateHistogramGroupSource extends SingleGroupSource {

     private final Interval interval;
     private final ZoneId timeZone;
-    private Rounding rounding;
+    private final Rounding.Prepared rounding;

     public DateHistogramGroupSource(String field, ScriptConfig scriptConfig, Interval interval, ZoneId timeZone) {
         super(field, scriptConfig);
         this.interval = interval;
         this.timeZone = timeZone;
+        rounding = buildRounding();
+    }
+
+    public DateHistogramGroupSource(StreamInput in) throws IOException {
+        super(in);
+        this.interval = readInterval(in);
+        this.timeZone = in.readOptionalZoneId();
+        // Format was optional in 7.2.x, removed in 7.3+
+        if (in.getVersion().before(Version.V_7_3_0)) {
+            in.readOptionalString();
+        }
+        rounding = buildRounding();
+    }
+
+    private Rounding.Prepared buildRounding() {
         Rounding.DateTimeUnit timeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString());
         final Rounding.Builder roundingBuilder;
         if (timeUnit != null) {
@@ -227,17 +241,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
         if (timeZone != null) {
             roundingBuilder.timeZone(timeZone);
         }
-        this.rounding = roundingBuilder.build();
-    }
-
-    public DateHistogramGroupSource(StreamInput in) throws IOException {
-        super(in);
-        this.interval = readInterval(in);
-        this.timeZone = in.readOptionalZoneId();
-        // Format was optional in 7.2.x, removed in 7.3+
-        if (in.getVersion().before(Version.V_7_3_0)) {
-            in.readOptionalString();
-        }
+        return roundingBuilder.build().prepareForUnknown();
     }

     private static ConstructingObjectParser<DateHistogramGroupSource, Void> createParser(boolean lenient) {
@@ -296,7 +300,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
         return timeZone;
     }

-    public Rounding getRounding() {
+    Rounding.Prepared getRounding() {
         return rounding;
     }

@@ -283,7 +283,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
                 asMap("the_histo", now)
             )
         );
-        final Rounding rounding = dateHistoConfig.createRounding();
+        final Rounding.Prepared rounding = dateHistoConfig.createRounding();
         executeTestCase(dataset, job, now, (resp) -> {
             assertThat(resp.size(), equalTo(3));
             IndexRequest request = resp.get(0);
@@ -350,7 +350,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
                 asMap("the_histo", now)
             )
         );
-        final Rounding rounding = dateHistoConfig.createRounding();
+        final Rounding.Prepared rounding = dateHistoConfig.createRounding();
         executeTestCase(dataset, job, now, (resp) -> {
             assertThat(resp.size(), equalTo(2));
             IndexRequest request = resp.get(0);