Optimize date_histograms across daylight savings time (backport of #55559) (#56334)

Rounding dates on a shard that contains a daylight savings time transition
is currently something like 1400% slower than when a shard contains dates
only on one side of the DST transition. It also creates a ton of short-lived
garbage. This replaces that implementation with one that benchmarks to
having around 30% overhead instead of the 1400%. And it doesn't generate
any garbage per search hit.

Some background:
There are two ways to round in ES:
* Round to the nearest time unit (Day/Hour/Week/Month/etc)
* Round to the nearest time *interval* (3 days/2 weeks/etc)
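
For example, here's a sketch of the unit flavor using the builder call that shows up in the benchmark below (`utcMillis` is a stand-in input; the interval flavor is built analogously):

import java.time.ZoneId;

// Round down to the start of the local month in New York.
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.MONTH_OF_YEAR)
    .timeZone(ZoneId.of("America/New_York"))
    .build();
long monthStartUtcMillis = rounding.round(utcMillis);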

I'm only optimizing the first one in this change and plan to do the second
in a follow-up. It turns out that rounding to the nearest unit really *is*
two problems: when the unit rounds to midnight (day/week/month/year) and
when it doesn't (hour/minute/second). Rounding to midnight is consistently
about 25% faster than rounding to individual hours or minutes.

This optimization relies on being able to *usually* figure out what the
minimum and maximum dates are on the shard. This is similar to an existing
optimization where we rewrite time zones that aren't fixed
(think America/New_York and its daylight savings time transitions) into
fixed time zones so long as there isn't a daylight savings time transition
on the shard (UTC-5 or UTC-4 for America/New_York). Once I implement
time interval rounding, the time zone rewriting optimization *should* no
longer be needed.
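
Concretely, the new entry point added in this change looks like this (a sketch; `shardMinUtcMillis`, `shardMaxUtcMillis`, and `valuesOnShard` are hypothetical stand-ins for what the aggregator knows):

// Prepare once for the shard's range, then round each doc's value cheaply.
Rounding.Prepared prepared = rounding.prepare(shardMinUtcMillis, shardMaxUtcMillis);
for (long docUtcMillis : valuesOnShard) {
    long bucketKey = prepared.round(docUtcMillis); // no per-hit garbage
}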

This optimization doesn't come into play for `composite` or
`auto_date_histogram` aggs because neither has been migrated to the new
`DATE` `ValuesSourceType`, which is where that range lookup happens. When
they are, they will be able to pick up the optimization without much work.
I expect this to be substantial for `auto_date_histogram` but less so for
`composite` because it deals with fewer values.

Note: My 30% overhead figure comes from small numbers of daylight savings
time transitions. That overhead grows logarithmically with the number of
transitions. When there are two thousand years' worth of transitions my
algorithm ends up being 250% slower than rounding
without a time zone, but java time is 47000% slower at that point,
allocating memory as fast as it possibly can.
Nik Everett authored on 2020-05-07 09:10:51 -04:00, committed by GitHub
parent 3bad5b3c01
commit e35919d3b8
23 changed files with 2009 additions and 368 deletions


@@ -53,9 +53,9 @@ To get realistic results, you should exercise care when running benchmarks. Here
`performance` CPU governor.
* Vary the problem input size with `@Param`.
* Use the integrated profilers in JMH to dig deeper if benchmark results do not match your hypotheses:
* Add `-prof gc` to the options to check whether the garbage collector runs during a microbenchmark and skews your results. If so, try to force a GC between runs (`-gc true`) but watch out for the caveats.
* Add `-prof perf` or `-prof perfasm` (both only available on Linux) to see hotspots.
* Have your benchmarks peer-reviewed.
### Don't


@@ -0,0 +1,117 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import org.elasticsearch.common.time.DateFormatter;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import java.time.ZoneId;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
@Fork(2)
@Warmup(iterations = 10)
@Measurement(iterations = 5)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
public class RoundingBenchmark {
private static final DateFormatter FORMATTER = DateFormatter.forPattern("date_optional_time");
@Param({
"2000-01-01 to 2020-01-01", // A super long range
"2000-10-01 to 2000-11-01", // A whole month which is pretty believable
"2000-10-29 to 2000-10-30", // A date right around daylight savings time.
"2000-06-01 to 2000-06-02" // A date fully in one time zone. Should be much faster than above.
})
public String range;
@Param({ "java time", "es" })
public String rounder;
@Param({ "UTC", "America/New_York" })
public String zone;
@Param({ "MONTH_OF_YEAR", "HOUR_OF_DAY" })
public String timeUnit;
@Param({ "1", "10000", "1000000", "100000000" })
public int count;
private long min;
private long max;
private long[] dates;
private Supplier<Rounding.Prepared> rounderBuilder;
@Setup
public void buildDates() {
String[] r = range.split(" to ");
min = FORMATTER.parseMillis(r[0]);
max = FORMATTER.parseMillis(r[1]);
dates = new long[count];
long date = min;
long diff = (max - min) / dates.length;
for (int i = 0; i < dates.length; i++) {
if (date >= max) {
throw new IllegalStateException("made a bad date [" + date + "]");
}
dates[i] = date;
date += diff;
}
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.valueOf(timeUnit)).timeZone(ZoneId.of(zone)).build();
switch (rounder) {
case "java time":
rounderBuilder = rounding::prepareJavaTime;
break;
case "es":
rounderBuilder = () -> rounding.prepare(min, max);
break;
default:
throw new IllegalArgumentException("Expectd rounder to be [java time] or [es]");
}
}
@Benchmark
public void round(Blackhole bh) {
Rounding.Prepared rounder = rounderBuilder.get();
for (int i = 0; i < dates.length; i++) {
bh.consume(rounder.round(dates[i]));
}
}
@Benchmark
public void nextRoundingValue(Blackhole bh) {
Rounding.Prepared rounder = rounderBuilder.get();
for (int i = 0; i < dates.length; i++) {
bh.consume(rounder.nextRoundingValue(dates[i]));
}
}
}


@@ -0,0 +1,641 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import java.time.Instant;
import java.time.ZoneId;
import java.time.zone.ZoneOffsetTransition;
import java.time.zone.ZoneOffsetTransitionRule;
import java.time.zone.ZoneRules;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
/**
* Converts utc into local time and back again.
* <p>
* "Local time" is defined by some time zone, specifically and {@link ZoneId}.
* At any point in time a particular time zone is at some offset from from
* utc. So converting from utc is as simple as adding the offset.
* <p>
* Getting from local time back to utc is harder. Most local times happen once.
* But some local times happen twice. And some don't happen at all. Take, for
* example, the time in my house. Most days I don't touch my clocks and I'm a
* constant offset from UTC. But once in the fall at 2am I roll my clock back.
* So at 5am utc my clocks say 1am. Then at 6am utc my clocks say 1am AGAIN.
* I do similarly terrifying things again in the spring when I skip my clocks
* straight from 1:59am to 3am.
* <p>
* So there are two methods to convert from local time back to utc,
* {@link #localToUtc(long, Strategy)} and {@link #localToUtcInThisOffset(long)}.
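* <p>
* For example (illustrative numbers, not from this commit): at a fixed
* -05:00 offset {@code utcToLocalTime(t)} returns {@code t - 18_000_000}
* and {@code localToUtcInThisOffset} inverts that exactly, while
* {@code localToUtc} consults a {@link Strategy} whenever the rounded
* local time fell into a gap or an overlap.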
*/
public abstract class LocalTimeOffset {
/**
* Lookup offsets for a provided zone. This <strong>can</strong> fail if
* there are many transitions and the provided lookup would be very large.
*
* @return a {@linkplain Lookup} or {@code null} if none could be built
*/
public static Lookup lookup(ZoneId zone, long minUtcMillis, long maxUtcMillis) {
if (minUtcMillis > maxUtcMillis) {
throw new IllegalArgumentException("[" + minUtcMillis + "] must be <= [" + maxUtcMillis + "]");
}
ZoneRules rules = zone.getRules();
{
LocalTimeOffset fixed = checkForFixedZone(zone, rules);
if (fixed != null) {
return new FixedLookup(zone, fixed);
}
}
List<ZoneOffsetTransition> transitions = collectTransitions(zone, rules, minUtcMillis, maxUtcMillis);
if (transitions == null) {
// The range is too large for us to pre-build all the offsets
return null;
}
if (transitions.size() < 3) {
/*
* It's actually quite common that there are *very* few transitions.
* This case where there are only two transitions covers an entire
* year of data! In any case, it is slightly faster to do the
* "simpler" thing and compare the start times instead of performing
* a binary search when there are so few offsets to look at.
*/
return new LinkedListLookup(zone, minUtcMillis, maxUtcMillis, transitions);
}
return new TransitionArrayLookup(zone, minUtcMillis, maxUtcMillis, transitions);
}
/**
* Lookup offsets without any known min or max time. This will generally
* fail if the provided zone isn't fixed.
*
* @return a lookup function or {@code null} if none could be built
*/
public static LocalTimeOffset lookupFixedOffset(ZoneId zone) {
return checkForFixedZone(zone, zone.getRules());
}
private final long millis;
private LocalTimeOffset(long millis) {
this.millis = millis;
}
/**
* Convert a time in utc into the local time at this offset.
*/
public final long utcToLocalTime(long utcMillis) {
return utcMillis + millis;
}
/**
* Convert a time in local millis to utc millis using <strong>this</strong> offset.
* <p>
* <strong>Important:</strong> Callers will rarely want to <strong>force</strong>
* using this offset and are instead interested in picking an appropriate
* offset for some local time that they have rounded down. In that case use
* {@link #localToUtc(long, Strategy)}.
*/
public final long localToUtcInThisOffset(long localMillis) {
return localMillis - millis;
}
/**
* Convert a local time that occurs during this offset or a previous
* offset to utc, providing a strategy for how to resolve "funny" cases.
* You can use this if you've converted from utc to local, rounded down,
* and then want to convert back to utc and you need fine control over
* how to handle the "funny" edges.
* <p>
* This will not help you if you must convert a local time that you've
* rounded <strong>up</strong>. For that you are on your own. May God
* have mercy on your soul.
*/
public abstract long localToUtc(long localMillis, Strategy strat);
public interface Strategy {
/**
* Handle a local time that never actually happened because a "gap"
* jumped over it. This happens in many time zones when folks wind
* their clocks forwards in the spring.
*
* @return the time in utc representing the local time
*/
long inGap(long localMillis, Gap gap);
/**
* Handle a local time that happened before the start of a gap.
*
* @return the time in utc representing the local time
*/
long beforeGap(long localMillis, Gap gap);
/**
* Handle a local time that happened twice because an "overlap"
* jumped behind it. This happens in many time zones when folks wind
* their clocks back in the fall.
*
* @return the time in utc representing the local time
*/
long inOverlap(long localMillis, Overlap overlap);
/**
* Handle a local time that happened before the start of an overlap.
*
* @return the time in utc representing the local time
*/
long beforeOverlap(long localMillis, Overlap overlap);
}
/**
* Does this offset contain the provided time?
*/
protected abstract boolean containsUtcMillis(long utcMillis);
/**
* Find the offset containing the provided time, first checking this
* offset, then its previous offset, then that one's previous offset, etc.
*/
protected abstract LocalTimeOffset offsetContaining(long utcMillis);
@Override
public String toString() {
return toString(millis);
}
protected abstract String toString(long millis);
/**
* How to get instances of {@link LocalTimeOffset}.
*/
public abstract static class Lookup {
/**
* Lookup the offset at the provided millis in utc.
*/
public abstract LocalTimeOffset lookup(long utcMillis);
/**
* If the offset for a range is constant then return it, otherwise
* return {@code null}.
*/
public abstract LocalTimeOffset fixedInRange(long minUtcMillis, long maxUtcMillis);
/**
* The number of offsets in the lookup. Package private for testing.
*/
abstract int size();
}
private static class NoPrevious extends LocalTimeOffset {
NoPrevious(long millis) {
super(millis);
}
@Override
public long localToUtc(long localMillis, Strategy strat) {
return localToUtcInThisOffset(localMillis);
}
@Override
protected boolean containsUtcMillis(long utcMillis) {
return true;
}
@Override
protected LocalTimeOffset offsetContaining(long utcMillis) {
/*
* Since there isn't a previous offset this offset *must* contain
* the provided time.
*/
return this;
}
@Override
protected String toString(long millis) {
return Long.toString(millis);
}
}
public abstract static class Transition extends LocalTimeOffset {
private final LocalTimeOffset previous;
private final long startUtcMillis;
private Transition(long millis, LocalTimeOffset previous, long startUtcMillis) {
super(millis);
this.previous = previous;
this.startUtcMillis = startUtcMillis;
}
/**
* The offset before this one.
*/
public LocalTimeOffset previous() {
return previous;
}
@Override
protected final boolean containsUtcMillis(long utcMillis) {
return utcMillis >= startUtcMillis;
}
@Override
protected final LocalTimeOffset offsetContaining(long utcMillis) {
if (containsUtcMillis(utcMillis)) {
return this;
}
return previous.offsetContaining(utcMillis);
}
/**
* The time that this offset started in milliseconds since epoch.
*/
public long startUtcMillis() {
return startUtcMillis;
}
}
public static class Gap extends Transition {
private final long firstMissingLocalTime;
private final long firstLocalTimeAfterGap;
private Gap(long millis, LocalTimeOffset previous, long startUtcMillis, long firstMissingLocalTime, long firstLocalTimeAfterGap) {
super(millis, previous, startUtcMillis);
this.firstMissingLocalTime = firstMissingLocalTime;
this.firstLocalTimeAfterGap = firstLocalTimeAfterGap;
assert firstMissingLocalTime < firstLocalTimeAfterGap;
}
@Override
public long localToUtc(long localMillis, Strategy strat) {
if (localMillis >= firstLocalTimeAfterGap) {
return localToUtcInThisOffset(localMillis);
}
if (localMillis >= firstMissingLocalTime) {
return strat.inGap(localMillis, this);
}
return strat.beforeGap(localMillis, this);
}
/**
* The first time that is missing from the local time because of this gap.
*/
public long firstMissingLocalTime() {
return firstMissingLocalTime;
}
@Override
protected String toString(long millis) {
return "Gap of " + millis + "@" + Instant.ofEpochMilli(startUtcMillis());
}
}
public static class Overlap extends Transition {
private final long firstOverlappingLocalTime;
private final long firstNonOverlappingLocalTime;
private Overlap(long millis, LocalTimeOffset previous, long startUtcMillis,
long firstOverlappingLocalTime, long firstNonOverlappingLocalTime) {
super(millis, previous, startUtcMillis);
this.firstOverlappingLocalTime = firstOverlappingLocalTime;
this.firstNonOverlappingLocalTime = firstNonOverlappingLocalTime;
assert firstOverlappingLocalTime < firstNonOverlappingLocalTime;
}
@Override
public long localToUtc(long localMillis, Strategy strat) {
if (localMillis >= firstNonOverlappingLocalTime) {
return localToUtcInThisOffset(localMillis);
}
if (localMillis >= firstOverlappingLocalTime) {
return strat.inOverlap(localMillis, this);
}
return strat.beforeOverlap(localMillis, this);
}
/**
* The first local time after the overlap stops.
*/
public long firstNonOverlappingLocalTime() {
return firstNonOverlappingLocalTime;
}
/**
* The first local time to appear twice.
*/
public long firstOverlappingLocalTime() {
return firstOverlappingLocalTime;
}
@Override
protected String toString(long millis) {
return "Overlap of " + millis + "@" + Instant.ofEpochMilli(startUtcMillis());
}
}
private static class FixedLookup extends Lookup {
private final ZoneId zone;
private final LocalTimeOffset fixed;
private FixedLookup(ZoneId zone, LocalTimeOffset fixed) {
this.zone = zone;
this.fixed = fixed;
}
@Override
public LocalTimeOffset lookup(long utcMillis) {
return fixed;
}
@Override
public LocalTimeOffset fixedInRange(long minUtcMillis, long maxUtcMillis) {
return fixed;
}
@Override
int size() {
return 1;
}
@Override
public String toString() {
return String.format(Locale.ROOT, "FixedLookup[for %s at %s]", zone, fixed);
}
}
/**
* Looks up transitions by checking whether the date is after the start
* of each transition. Simple, so it is fast for small numbers of transitions.
*/
private static class LinkedListLookup extends AbstractManyTransitionsLookup {
private final LocalTimeOffset lastOffset;
private final int size;
LinkedListLookup(ZoneId zone, long minUtcMillis, long maxUtcMillis, List<ZoneOffsetTransition> transitions) {
super(zone, minUtcMillis, maxUtcMillis);
int size = 1;
LocalTimeOffset last = buildNoPrevious(transitions.get(0));
for (ZoneOffsetTransition t : transitions) {
last = buildTransition(t, last);
size++;
}
this.lastOffset = last;
this.size = size;
}
@Override
public LocalTimeOffset innerLookup(long utcMillis) {
return lastOffset.offsetContaining(utcMillis);
}
@Override
int size() {
return size;
}
}
/**
* Builds an array that can be {@link Arrays#binarySearch(long[], long)}ed
* for the daylight savings time transitions.
*/
private static class TransitionArrayLookup extends AbstractManyTransitionsLookup {
private final LocalTimeOffset[] offsets;
private final long[] transitionOutUtcMillis;
private TransitionArrayLookup(ZoneId zone, long minUtcMillis, long maxUtcMillis, List<ZoneOffsetTransition> transitions) {
super(zone, minUtcMillis, maxUtcMillis);
this.offsets = new LocalTimeOffset[transitions.size() + 1];
this.transitionOutUtcMillis = new long[transitions.size()];
this.offsets[0] = buildNoPrevious(transitions.get(0));
int i = 0;
for (ZoneOffsetTransition t : transitions) {
Transition transition = buildTransition(t, this.offsets[i]);
transitionOutUtcMillis[i] = transition.startUtcMillis();
i++;
this.offsets[i] = transition;
}
}
@Override
protected LocalTimeOffset innerLookup(long utcMillis) {
int index = Arrays.binarySearch(transitionOutUtcMillis, utcMillis);
if (index < 0) {
/*
* We're mostly not going to find the exact offset. Instead we'll
* end up at the "insertion point" for the utcMillis. We have no
* plans to insert utcMillis in the array, but the offset that
* contains utcMillis happens to be "insertion point" - 1.
*/
index = -index - 1;
} else {
index++;
}
assert index < offsets.length : "binarySearch did something weird";
return offsets[index];
}
@Override
int size() {
return offsets.length;
}
@Override
public String toString() {
return String.format(Locale.ROOT, "TransitionArrayLookup[for %s between %s and %s]",
zone, Instant.ofEpochMilli(minUtcMillis), Instant.ofEpochMilli(maxUtcMillis));
}
}
private abstract static class AbstractManyTransitionsLookup extends Lookup {
protected final ZoneId zone;
protected final long minUtcMillis;
protected final long maxUtcMillis;
AbstractManyTransitionsLookup(ZoneId zone, long minUtcMillis, long maxUtcMillis) {
this.zone = zone;
this.minUtcMillis = minUtcMillis;
this.maxUtcMillis = maxUtcMillis;
}
@Override
public final LocalTimeOffset lookup(long utcMillis) {
assert utcMillis >= minUtcMillis;
assert utcMillis <= maxUtcMillis;
return innerLookup(utcMillis);
}
protected abstract LocalTimeOffset innerLookup(long utcMillis);
@Override
public final LocalTimeOffset fixedInRange(long minUtcMillis, long maxUtcMillis) {
LocalTimeOffset offset = lookup(maxUtcMillis);
return offset.containsUtcMillis(minUtcMillis) ? offset : null;
}
protected static NoPrevious buildNoPrevious(ZoneOffsetTransition transition) {
return new NoPrevious(transition.getOffsetBefore().getTotalSeconds() * 1000);
}
protected static Transition buildTransition(ZoneOffsetTransition transition, LocalTimeOffset previous) {
long utcStart = transition.toEpochSecond() * 1000;
long offsetBeforeMillis = transition.getOffsetBefore().getTotalSeconds() * 1000;
long offsetAfterMillis = transition.getOffsetAfter().getTotalSeconds() * 1000;
if (transition.isGap()) {
long firstMissingLocalTime = utcStart + offsetBeforeMillis;
long firstLocalTimeAfterGap = utcStart + offsetAfterMillis;
return new Gap(offsetAfterMillis, previous, utcStart, firstMissingLocalTime, firstLocalTimeAfterGap);
}
long firstOverlappingLocalTime = utcStart + offsetAfterMillis;
long firstNonOverlappingLocalTime = utcStart + offsetBeforeMillis;
return new Overlap(offsetAfterMillis, previous, utcStart, firstOverlappingLocalTime, firstNonOverlappingLocalTime);
}
}
private static LocalTimeOffset checkForFixedZone(ZoneId zone, ZoneRules rules) {
if (false == rules.isFixedOffset()) {
return null;
}
LocalTimeOffset fixedTransition = new NoPrevious(rules.getOffset(Instant.EPOCH).getTotalSeconds() * 1000);
return fixedTransition;
}
/**
* The maximum number of {@link ZoneOffsetTransition}s to collect before
* giving up because the date range will be "too big". I picked this number
* fairly arbitrarily with the following goals:
* <ol>
* <li>Don't let {@code lookup(Long.MIN_VALUE, Long.MAX_VALUE)} consume all
* the memory in the JVM.
* <li>It should be much larger than the number of offsets I'm bound to
* collect.
* </ol>
* {@code 5_000} collects about 2,500 years' worth of offsets, which feels like
* quite a few!
*/
private static final int MAX_TRANSITIONS = 5000;
/**
* Collect transitions from the provided rules for the provided date range
* into a list we can reason about. If we'd collect more than
* {@link #MAX_TRANSITIONS} rules we'll abort, returning {@code null}
* signaling that {@link LocalTimeOffset} is probably not the implementation
* to use in this case.
* <p>
* {@link ZoneRules} gives us access to the local time transition database
* with two methods: {@link ZoneRules#getTransitions()} for "fully defined"
* transitions and {@link ZoneRules#getTransitionRules()}. The first one
* is a list of transitions and when they happened. To get the full
* picture of transitions you pick up from where that one leaves off using
* the rules, which are basically factories that you give the year in local
* time to build a transition for that year.
* <p>
* This method collects all of the {@link ZoneRules#getTransitions()} that
* are relevant for the date range and, if our range extends past the last
* transition, calls
* {@link #buildTransitionsFromRules(List, ZoneId, ZoneRules, long, long)}
* to build the remaining transitions to fully describe the range.
*/
private static List<ZoneOffsetTransition> collectTransitions(ZoneId zone, ZoneRules rules, long minUtcMillis, long maxUtcMillis) {
long minSecond = minUtcMillis / 1000;
long maxSecond = maxUtcMillis / 1000;
List<ZoneOffsetTransition> transitions = new ArrayList<>();
ZoneOffsetTransition t = null;
Iterator<ZoneOffsetTransition> itr = rules.getTransitions().iterator();
// Skip all transitions that are before our start time
while (itr.hasNext() && (t = itr.next()).toEpochSecond() < minSecond) {}
if (false == itr.hasNext()) {
if (minSecond < t.toEpochSecond() && t.toEpochSecond() < maxSecond) {
transitions.add(t);
}
transitions = buildTransitionsFromRules(transitions, zone, rules, minSecond, maxSecond);
if (transitions != null && transitions.isEmpty()) {
/*
* If there aren't any rules and we haven't accumulated
* any transitions then we grab the last one we saw so we
* have some knowledge of the offset.
*/
transitions.add(t);
}
return transitions;
}
transitions.add(t);
while (itr.hasNext()) {
t = itr.next();
if (t.toEpochSecond() > maxSecond) {
return transitions;
}
transitions.add(t);
if (transitions.size() > MAX_TRANSITIONS) {
return null;
}
}
return buildTransitionsFromRules(transitions, zone, rules, t.toEpochSecond() + 1, maxSecond);
}
/**
* Build transitions for every year in our range from the rules
* stored in {@link ZoneRules#getTransitionRules()}.
*/
private static List<ZoneOffsetTransition> buildTransitionsFromRules(List<ZoneOffsetTransition> transitions,
ZoneId zone, ZoneRules rules, long minSecond, long maxSecond) {
List<ZoneOffsetTransitionRule> transitionRules = rules.getTransitionRules();
if (transitionRules.isEmpty()) {
/*
* Zones like Asia/Kathmandu don't have any rules so we don't
* need to do any of this.
*/
return transitions;
}
int minYear = Instant.ofEpochSecond(minSecond).atZone(zone).toLocalDate().getYear();
int maxYear = Instant.ofEpochSecond(maxSecond).atZone(zone).toLocalDate().getYear();
/*
* Record only the rules from the current year that are greater
* than the minSecond so we don't go back in time when coming from
* a fixed transition.
*/
ZoneOffsetTransition lastTransitionFromMinYear = null;
for (ZoneOffsetTransitionRule rule : transitionRules) {
lastTransitionFromMinYear = rule.createTransition(minYear);
if (lastTransitionFromMinYear.toEpochSecond() < minSecond) {
continue;
}
transitions.add(lastTransitionFromMinYear);
if (transitions.size() > MAX_TRANSITIONS) {
return null;
}
}
if (minYear == maxYear) {
if (transitions.isEmpty()) {
// Make sure we have *some* transition to work with.
transitions.add(lastTransitionFromMinYear);
}
return transitions;
}
// Now build transitions for all of the remaining years.
minYear++;
if (transitions.size() + (maxYear - minYear) * transitionRules.size() > MAX_TRANSITIONS) {
return null;
}
for (int year = minYear; year <= maxYear; year++) {
for (ZoneOffsetTransitionRule rule : transitionRules) {
transitions.add(rule.createTransition(year));
}
}
return transitions;
}
}
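
A minimal usage sketch of the class above (the zone, range, and `utcMillis` are assumptions for illustration; in this commit the real caller is `Rounding.TimeUnitRounding#prepare`):

ZoneId tz = ZoneId.of("America/New_York");
long min = 946684800000L; // 2000-01-01T00:00:00Z
long max = 978307200000L; // 2001-01-01T00:00:00Z
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(tz, min, max);
if (lookup != null) { // null means too many transitions; callers fall back to java.time
    LocalTimeOffset offset = lookup.lookup(utcMillis); // utcMillis must be within [min, max]
    long localMillis = offset.utcToLocalTime(utcMillis);
}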


@@ -20,6 +20,8 @@ package org.elasticsearch.common;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.LocalTimeOffset.Gap;
import org.elasticsearch.common.LocalTimeOffset.Overlap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@@ -44,60 +46,105 @@ import java.time.zone.ZoneOffsetTransition;
import java.time.zone.ZoneRules;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
/**
* A strategy for rounding milliseconds since epoch.
* <p>
* There are two implementations for rounding.
* The first one requires a date time unit and rounds to the supplied date time unit (i.e. quarter of year, day of month).
* The second one allows you to specify an interval to round to.
* <p>
* See <a href="https://davecturner.github.io/2019/04/14/timezone-rounding.html">this</a>
* blog for some background reading. It's super interesting and the links are
* a comedy gold mine. If you like time zones. Or hate them.
*/
public abstract class Rounding implements Writeable {
public enum DateTimeUnit {
WEEK_OF_WEEKYEAR((byte) 1, IsoFields.WEEK_OF_WEEK_BASED_YEAR) {
private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(7);
long roundFloor(long utcMillis) {
return DateUtils.roundWeekOfWeekYear(utcMillis);
}
@Override
long extraLocalOffsetLookup() {
return extraLocalOffsetLookup;
}
},
YEAR_OF_CENTURY((byte) 2, ChronoField.YEAR_OF_ERA) {
private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(366);
long roundFloor(long utcMillis) {
return DateUtils.roundYear(utcMillis);
}
long extraLocalOffsetLookup() {
return extraLocalOffsetLookup;
}
},
QUARTER_OF_YEAR((byte) 3, IsoFields.QUARTER_OF_YEAR) {
private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(92);
long roundFloor(long utcMillis) {
return DateUtils.roundQuarterOfYear(utcMillis);
}
long extraLocalOffsetLookup() {
return extraLocalOffsetLookup;
}
},
MONTH_OF_YEAR((byte) 4, ChronoField.MONTH_OF_YEAR) {
private final long extraLocalOffsetLookup = TimeUnit.DAYS.toMillis(31);
long roundFloor(long utcMillis) {
return DateUtils.roundMonthOfYear(utcMillis);
}
long extraLocalOffsetLookup() {
return extraLocalOffsetLookup;
}
},
DAY_OF_MONTH((byte) 5, ChronoField.DAY_OF_MONTH) {
final long unitMillis = ChronoField.DAY_OF_MONTH.getBaseUnit().getDuration().toMillis();
long roundFloor(long utcMillis) {
return DateUtils.roundFloor(utcMillis, unitMillis);
}
long extraLocalOffsetLookup() {
return unitMillis;
}
},
HOUR_OF_DAY((byte) 6, ChronoField.HOUR_OF_DAY) {
final long unitMillis = ChronoField.HOUR_OF_DAY.getBaseUnit().getDuration().toMillis();
long roundFloor(long utcMillis) {
return DateUtils.roundFloor(utcMillis, unitMillis);
}
long extraLocalOffsetLookup() {
return unitMillis;
}
},
MINUTES_OF_HOUR((byte) 7, ChronoField.MINUTE_OF_HOUR) {
final long unitMillis = ChronoField.MINUTE_OF_HOUR.getBaseUnit().getDuration().toMillis();
long roundFloor(long utcMillis) {
return DateUtils.roundFloor(utcMillis, unitMillis);
}
long extraLocalOffsetLookup() {
return unitMillis;
}
},
SECOND_OF_MINUTE((byte) 8, ChronoField.SECOND_OF_MINUTE) {
final long unitMillis = ChronoField.SECOND_OF_MINUTE.getBaseUnit().getDuration().toMillis();
long roundFloor(long utcMillis) {
return DateUtils.roundFloor(utcMillis, unitMillis);
}
long extraLocalOffsetLookup() {
return unitMillis;
}
};
private final byte id;
@@ -117,6 +164,14 @@ public abstract class Rounding implements Writeable {
*/
abstract long roundFloor(long utcMillis);
/**
* When looking up {@link LocalTimeOffset} go this many milliseconds
* in the past from the minimum millis since epoch that we plan to
* look up so that we can see transitions that we might have rounded
* down beyond.
*/
abstract long extraLocalOffsetLookup();
public byte getId() {
return id;
}
@@ -151,18 +206,58 @@
public abstract byte id();
/**
* A strategy for rounding milliseconds since epoch.
*/
public interface Prepared {
/**
* Rounds the given value.
*/
long round(long utcMillis);
/**
* Given the rounded value (which was potentially generated by
* {@link #round(long)}), returns the next rounding value. For
* example, with interval based rounding, if the interval is
* 3, {@code nextRoundValue(6) = 9}.
*/
long nextRoundingValue(long utcMillis);
}
/**
* Prepare to round many times.
*/
public abstract Prepared prepare(long minUtcMillis, long maxUtcMillis);
/**
* Prepare to round many dates over an unknown range. Prefer
* {@link #prepare(long, long)} if you can find the range because
* it'll be much more efficient.
*/
public abstract Prepared prepareForUnknown();
/**
* Prepare rounding using java time classes. Package private for testing.
*/
abstract Prepared prepareJavaTime();
/**
* Rounds the given value.
* <p>
* Prefer {@link #prepare(long, long)} if rounding many values.
*/
public final long round(long utcMillis) {
return prepare(utcMillis, utcMillis).round(utcMillis);
}
/**
* Given the rounded value (which was potentially generated by
* {@link #round(long)}), returns the next rounding value. For
* example, with interval based rounding, if the interval is
* 3, {@code nextRoundValue(6) = 9}.
* <p>
* Prefer {@link #prepare(long, long)} if rounding many values.
*/
public final long nextRoundingValue(long utcMillis) {
return prepare(utcMillis, utcMillis).nextRoundingValue(utcMillis);
}
/**
* How "offset" this rounding is from the traditional "start" of the period.
@@ -245,23 +340,16 @@
}
}
static class TimeUnitRounding extends Rounding {
static final byte ID = 1;
private final DateTimeUnit unit;
private final ZoneId timeZone;
private final boolean unitRoundsToMidnight;
TimeUnitRounding(DateTimeUnit unit, ZoneId timeZone) {
this.unit = unit;
this.timeZone = timeZone;
this.unitRoundsToMidnight = this.unit.field.getBaseUnit().getDuration().toMillis() > 3600000L;
}
TimeUnitRounding(StreamInput in) throws IOException {
@@ -319,123 +407,59 @@
}
@Override
public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
long minLookup = minUtcMillis - unit.extraLocalOffsetLookup();
long maxLookup = maxUtcMillis;
long unitMillis = 0;
if (false == unitRoundsToMidnight) {
/*
* Units that don't round to midnight may need to round down from
* as much as two units worth of millis in the future to find the
* nextRoundingValue.
*/
unitMillis = unit.field.getBaseUnit().getDuration().toMillis();
maxLookup += 2 * unitMillis;
}
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(timeZone, minLookup, maxLookup);
if (lookup == null) {
// Range too long, just use java.time
return prepareJavaTime();
}
LocalTimeOffset fixedOffset = lookup.fixedInRange(minLookup, maxLookup);
if (fixedOffset != null) {
// The time zone is effectively fixed
if (unitRoundsToMidnight) {
return new FixedToMidnightRounding(fixedOffset);
}
return new FixedNotToMidnightRounding(fixedOffset, unitMillis);
}
if (unitRoundsToMidnight) {
return new ToMidnightRounding(lookup);
}
return new NotToMidnightRounding(lookup, unitMillis);
}
@Override
public Prepared prepareForUnknown() {
LocalTimeOffset offset = LocalTimeOffset.lookupFixedOffset(timeZone);
if (offset != null) {
if (unitRoundsToMidnight) {
return new FixedToMidnightRounding(offset);
}
return new FixedNotToMidnightRounding(offset, unit.field.getBaseUnit().getDuration().toMillis());
}
return prepareJavaTime();
}
@Override
Prepared prepareJavaTime() {
if (unitRoundsToMidnight) {
return new JavaTimeToMidnightRounding();
}
return new JavaTimeNotToMidnightRounding(unit.field.getBaseUnit().getDuration().toMillis());
}
@Override
@@ -469,6 +493,253 @@
public String toString() {
return "Rounding[" + unit + " in " + timeZone + "]";
}
private class FixedToMidnightRounding implements Prepared {
private final LocalTimeOffset offset;
FixedToMidnightRounding(LocalTimeOffset offset) {
this.offset = offset;
}
@Override
public long round(long utcMillis) {
return offset.localToUtcInThisOffset(unit.roundFloor(offset.utcToLocalTime(utcMillis)));
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is used in date range's collect so we should optimize it too
return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis);
}
}
private class FixedNotToMidnightRounding implements Prepared {
private final LocalTimeOffset offset;
private final long unitMillis;
FixedNotToMidnightRounding(LocalTimeOffset offset, long unitMillis) {
this.offset = offset;
this.unitMillis = unitMillis;
}
@Override
public long round(long utcMillis) {
return offset.localToUtcInThisOffset(unit.roundFloor(offset.utcToLocalTime(utcMillis)));
}
@Override
public final long nextRoundingValue(long utcMillis) {
return round(utcMillis + unitMillis);
}
}
private class ToMidnightRounding implements Prepared, LocalTimeOffset.Strategy {
private final LocalTimeOffset.Lookup lookup;
ToMidnightRounding(LocalTimeOffset.Lookup lookup) {
this.lookup = lookup;
}
@Override
public long round(long utcMillis) {
LocalTimeOffset offset = lookup.lookup(utcMillis);
return offset.localToUtc(unit.roundFloor(offset.utcToLocalTime(utcMillis)), this);
}
@Override
public long nextRoundingValue(long utcMillis) {
// TODO this is actually used in date range's collect so we should optimize it
return new JavaTimeToMidnightRounding().nextRoundingValue(utcMillis);
}
@Override
public long inGap(long localMillis, Gap gap) {
return gap.startUtcMillis();
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return gap.previous().localToUtc(localMillis, this);
};
@Override
public long inOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(localMillis, this);
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
return overlap.previous().localToUtc(localMillis, this);
};
}
private class NotToMidnightRounding extends AbstractNotToMidnightRounding implements LocalTimeOffset.Strategy {
private final LocalTimeOffset.Lookup lookup;
NotToMidnightRounding(LocalTimeOffset.Lookup lookup, long unitMillis) {
super(unitMillis);
this.lookup = lookup;
}
@Override
public long round(long utcMillis) {
LocalTimeOffset offset = lookup.lookup(utcMillis);
long roundedLocalMillis = unit.roundFloor(offset.utcToLocalTime(utcMillis));
return offset.localToUtc(roundedLocalMillis, this);
}
@Override
public long inGap(long localMillis, Gap gap) {
// Round from just before the start of the gap
return gap.previous().localToUtc(unit.roundFloor(gap.firstMissingLocalTime() - 1), this);
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return inGap(localMillis, gap);
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
// Convert the overlap at this offset because that'll produce the largest result.
return overlap.localToUtcInThisOffset(localMillis);
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
if (overlap.firstNonOverlappingLocalTime() - overlap.firstOverlappingLocalTime() >= unitMillis) {
return overlap.localToUtcInThisOffset(localMillis);
}
return overlap.previous().localToUtc(localMillis, this); // This is mostly for Asia/Lord_Howe
}
}
private class JavaTimeToMidnightRounding implements Prepared {
@Override
public long round(long utcMillis) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone);
LocalDateTime localMidnight = truncateLocalDateTime(localDateTime);
return firstTimeOnDay(localMidnight);
}
@Override
public long nextRoundingValue(long utcMillis) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone);
LocalDateTime earlierLocalMidnight = truncateLocalDateTime(localDateTime);
LocalDateTime localMidnight = nextRelevantMidnight(earlierLocalMidnight);
return firstTimeOnDay(localMidnight);
}
private long firstTimeOnDay(LocalDateTime localMidnight) {
assert localMidnight.toLocalTime().equals(LocalTime.of(0, 0, 0)) : "firstTimeOnDay should only be called at midnight";
// Now work out what localMidnight actually means
final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(localMidnight);
if (currentOffsets.isEmpty() == false) {
// There is at least one midnight on this day, so choose the first
final ZoneOffset firstOffset = currentOffsets.get(0);
final OffsetDateTime offsetMidnight = localMidnight.atOffset(firstOffset);
return offsetMidnight.toInstant().toEpochMilli();
} else {
// There were no midnights on this day, so we must have entered the day via an offset transition.
// Use the time of the transition as it is the earliest time on the right day.
ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(localMidnight);
return zoneOffsetTransition.getInstant().toEpochMilli();
}
}
private LocalDateTime nextRelevantMidnight(LocalDateTime localMidnight) {
assert localMidnight.toLocalTime().equals(LocalTime.MIDNIGHT) : "nextRelevantMidnight should only be called at midnight";
switch (unit) {
case DAY_OF_MONTH:
return localMidnight.plus(1, ChronoUnit.DAYS);
case WEEK_OF_WEEKYEAR:
return localMidnight.plus(7, ChronoUnit.DAYS);
case MONTH_OF_YEAR:
return localMidnight.plus(1, ChronoUnit.MONTHS);
case QUARTER_OF_YEAR:
return localMidnight.plus(3, ChronoUnit.MONTHS);
case YEAR_OF_CENTURY:
return localMidnight.plus(1, ChronoUnit.YEARS);
default:
throw new IllegalArgumentException("Unknown round-to-midnight unit: " + unit);
}
}
}
private class JavaTimeNotToMidnightRounding extends AbstractNotToMidnightRounding {
JavaTimeNotToMidnightRounding(long unitMillis) {
super(unitMillis);
}
@Override
public long round(long utcMillis) {
Instant instant = Instant.ofEpochMilli(utcMillis);
final ZoneRules rules = timeZone.getRules();
while (true) {
final Instant truncatedTime = truncateAsLocalTime(instant, rules);
final ZoneOffsetTransition previousTransition = rules.previousTransition(instant);
if (previousTransition == null) {
// truncateAsLocalTime cannot have failed if there were no previous transitions
return truncatedTime.toEpochMilli();
}
Instant previousTransitionInstant = previousTransition.getInstant();
if (truncatedTime != null && previousTransitionInstant.compareTo(truncatedTime) < 1) {
return truncatedTime.toEpochMilli();
}
// There was a transition in between the input time and the truncated time. Return to the transition time and
// round that down instead.
instant = previousTransitionInstant.minusNanos(1_000_000);
}
}
private Instant truncateAsLocalTime(Instant instant, final ZoneRules rules) {
assert unitRoundsToMidnight == false : "truncateAsLocalTime should not be called if unitRoundsToMidnight";
LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, timeZone);
final LocalDateTime truncatedLocalDateTime = truncateLocalDateTime(localDateTime);
final List<ZoneOffset> currentOffsets = rules.getValidOffsets(truncatedLocalDateTime);
if (currentOffsets.isEmpty() == false) {
// at least one possibilities - choose the latest one that's still no later than the input time
for (int offsetIndex = currentOffsets.size() - 1; offsetIndex >= 0; offsetIndex--) {
final Instant result = truncatedLocalDateTime.atOffset(currentOffsets.get(offsetIndex)).toInstant();
if (result.isAfter(instant) == false) {
return result;
}
}
assert false : "rounded time not found for " + instant + " with " + this;
return null;
} else {
// The chosen local time didn't happen. This means we were given a time in an hour (or a minute) whose start
// is missing due to an offset transition, so the time cannot be truncated.
return null;
}
}
}
private abstract class AbstractNotToMidnightRounding implements Prepared {
protected final long unitMillis;
AbstractNotToMidnightRounding(long unitMillis) {
this.unitMillis = unitMillis;
}
@Override
public final long nextRoundingValue(long utcMillis) {
final long roundedAfterOneIncrement = round(utcMillis + unitMillis);
if (utcMillis < roundedAfterOneIncrement) {
return roundedAfterOneIncrement;
} else {
return round(utcMillis + 2 * unitMillis);
}
}
}
}
static class TimeIntervalRounding extends Rounding {
@@ -511,72 +782,89 @@
}
@Override
public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
return prepareForUnknown();
}
@Override
public Prepared prepareForUnknown() {
return prepareJavaTime();
}
@Override
Prepared prepareJavaTime() {
return new Prepared() {
return new Prepared() {
@Override
public long round(long utcMillis) {
if (fixedOffsetMillis != TZ_OFFSET_NON_FIXED) {
// This works as long as the tz offset doesn't change. It is worth getting this case out of the way first,
// as the calculations for fixing things near to offset changes are a little expensive and unnecessary
// in the common case of working with fixed offset timezones (such as UTC).
long localMillis = utcMillis + fixedOffsetMillis;
return (roundKey(localMillis, interval) * interval) - fixedOffsetMillis;
}
final Instant utcInstant = Instant.ofEpochMilli(utcMillis);
final LocalDateTime rawLocalDateTime = LocalDateTime.ofInstant(utcInstant, timeZone);
// a millisecond value with the same local time, in UTC, as `utcMillis` has in `timeZone`
final long localMillis = utcMillis + timeZone.getRules().getOffset(utcInstant).getTotalSeconds() * 1000;
assert localMillis == rawLocalDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
final long roundedMillis = roundKey(localMillis, interval) * interval;
final LocalDateTime roundedLocalDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(roundedMillis), ZoneOffset.UTC);
// Now work out what roundedLocalDateTime actually means
final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(roundedLocalDateTime);
if (currentOffsets.isEmpty() == false) {
// There is at least one instant with the desired local time. In general the desired result is
// the latest rounded time that's no later than the input time, but this could involve rounding across
// a timezone transition, which may yield the wrong result
final ZoneOffsetTransition previousTransition = timeZone.getRules().previousTransition(utcInstant.plusMillis(1));
for (int offsetIndex = currentOffsets.size() - 1; 0 <= offsetIndex; offsetIndex--) {
final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(offsetIndex));
final Instant offsetInstant = offsetTime.toInstant();
if (previousTransition != null && offsetInstant.isBefore(previousTransition.getInstant())) {
/*
* Rounding down across the transition can yield the
* wrong result. It's best to return to the transition
* time and round that down.
*/
return round(previousTransition.getInstant().toEpochMilli() - 1);
}
if (utcInstant.isBefore(offsetTime.toInstant()) == false) {
return offsetInstant.toEpochMilli();
}
}
final OffsetDateTime offsetTime = roundedLocalDateTime.atOffset(currentOffsets.get(0));
final Instant offsetInstant = offsetTime.toInstant();
assert false : this + " failed to round " + utcMillis + " down: " + offsetInstant + " is the earliest possible";
return offsetInstant.toEpochMilli(); // TODO or throw something?
} else {
// The desired time isn't valid because it is within a gap, so just return the gap time.
ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(roundedLocalDateTime);
return zoneOffsetTransition.getInstant().toEpochMilli();
}
}
@Override
public long nextRoundingValue(long time) {
int offsetSeconds = timeZone.getRules().getOffset(Instant.ofEpochMilli(time)).getTotalSeconds();
long millis = time + interval + offsetSeconds * 1000;
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC)
.withZoneSameLocal(timeZone)
.toInstant().toEpochMilli();
}
private long roundKey(long value, long interval) {
if (value < 0) {
return (value - interval + 1) / interval;
} else {
return value / interval;
}
}
};
} }
@Override @Override
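The fast path at the top of `round` is worth spelling out: with a fixed offset the whole rounding is three long operations, shift to local, floor to the interval, shift back. A self-contained sketch of that arithmetic (the names are illustrative, not the ES fields):

```java
import java.time.Instant;
import java.time.ZoneOffset;

// Sketch of the fixed-offset fast path: when the zone's offset never
// changes over the prepared range, rounding is pure long arithmetic.
public class FixedOffsetRoundingSketch {
    public static void main(String[] args) {
        ZoneOffset tz = ZoneOffset.ofHours(-5);       // e.g. a fixed UTC-5
        long fixedOffsetMillis = tz.getTotalSeconds() * 1000L;
        long interval = 86_400_000L;                  // one day
        long utcMillis = Instant.parse("2020-03-07T03:30:00Z").toEpochMilli();

        long localMillis = utcMillis + fixedOffsetMillis;
        long rounded = Math.floorDiv(localMillis, interval) * interval - fixedOffsetMillis;

        // 2020-03-07T03:30Z is 2020-03-06T22:30 local; day-rounding in local
        // time gives 2020-03-06T00:00-05:00, i.e. 2020-03-06T05:00Z.
        System.out.println(Instant.ofEpochMilli(rounded)); // 2020-03-06T05:00:00Z
    }
}
```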
@ -644,13 +932,32 @@ public abstract class Rounding implements Writeable {
} }
@Override @Override
public long round(long value) { public Prepared prepare(long minUtcMillis, long maxUtcMillis) {
return delegate.round(value - offset) + offset; return wrapPreparedRounding(delegate.prepare(minUtcMillis, maxUtcMillis));
} }
@Override @Override
public long nextRoundingValue(long value) { public Prepared prepareForUnknown() {
return delegate.nextRoundingValue(value - offset) + offset; return wrapPreparedRounding(delegate.prepareForUnknown());
}
@Override
Prepared prepareJavaTime() {
return wrapPreparedRounding(delegate.prepareJavaTime());
}
private Prepared wrapPreparedRounding(Prepared delegatePrepared) {
return new Prepared() {
@Override
public long round(long utcMillis) {
return delegatePrepared.round(utcMillis - offset) + offset;
}
@Override
public long nextRoundingValue(long utcMillis) {
return delegatePrepared.nextRoundingValue(utcMillis - offset) + offset;
}
};
} }
@Override @Override
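The wrapper above composes with any delegate by translating into the delegate's frame and back out. A worked sketch assuming a plain UTC day rounding and the +6h `offset` a `date_histogram` request can specify:

```java
import java.time.Instant;

// Sketch: wrapping a plain UTC day rounding with a +6h offset moves the
// bucket boundaries to 06:00Z, exactly as the wrapper composes
// round(utcMillis - offset) + offset.
public class OffsetRoundingSketch {
    public static void main(String[] args) {
        long offset = 6 * 3_600_000L;
        long day = 86_400_000L;
        long utcMillis = Instant.parse("2020-01-02T04:00:00Z").toEpochMilli();

        long delegateRounded = Math.floorDiv(utcMillis - offset, day) * day;
        long rounded = delegateRounded + offset;

        // 04:00Z is before the 06:00Z boundary, so it lands in the bucket
        // that starts at 2020-01-01T06:00:00Z.
        System.out.println(Instant.ofEpochMilli(rounded)); // 2020-01-01T06:00:00Z
    }
}
```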

@ -313,7 +313,7 @@ public class DateUtils {
* Rounds the given utc milliseconds since the epoch down to the next unit millis * Rounds the given utc milliseconds since the epoch down to the next unit millis
* *
* Note: This does not check for correctness of the result, as this only works with units smaller than or equal to a day * Note: This does not check for correctness of the result, as this only works with units smaller than or equal to a day
* In order to ensure the performane of this methods, there are no guards or checks in it * In order to ensure the performance of this methods, there are no guards or checks in it
* *
* @param utcMillis the milliseconds since the epoch * @param utcMillis the milliseconds since the epoch
* @param unitMillis the unit to round to * @param unitMillis the unit to round to
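For units that are a fixed number of milliseconds, the behaviour that javadoc describes reduces to a single floor. A hedged sketch of the documented behaviour, not the exact ES body:

```java
public class RoundFloorSketch {
    // Sketch: floor utcMillis to a multiple of unitMillis, with no guards,
    // correct for dates before the epoch too.
    static long roundFloor(long utcMillis, long unitMillis) {
        return Math.floorDiv(utcMillis, unitMillis) * unitMillis;
    }

    public static void main(String[] args) {
        assert roundFloor(5_400_000L, 3_600_000L) == 3_600_000L; // 01:30 -> 01:00
        assert roundFloor(-1L, 3_600_000L) == -3_600_000L;       // just before epoch
    }
}
```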

@ -19,11 +19,6 @@
package org.elasticsearch.search.aggregations.bucket.composite; package org.elasticsearch.search.aggregations.bucket.composite;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Objects;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Rounding; import org.elasticsearch.common.Rounding;
@ -46,6 +41,11 @@ import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Objects;
/** /**
* A {@link CompositeValuesSourceBuilder} that builds a {@link RoundingValuesSource} from a {@link Script} or * A {@link CompositeValuesSourceBuilder} that builds a {@link RoundingValuesSource} from a {@link Script} or
* a field name using the provided interval. * a field name using the provided interval.
@ -255,7 +255,9 @@ public class DateHistogramValuesSourceBuilder
} }
if (orig instanceof ValuesSource.Numeric) { if (orig instanceof ValuesSource.Numeric) {
ValuesSource.Numeric numeric = (ValuesSource.Numeric) orig; ValuesSource.Numeric numeric = (ValuesSource.Numeric) orig;
RoundingValuesSource vs = new RoundingValuesSource(numeric, rounding); // TODO once composite is plugged into the values source registry, or at least understands Date values source types, use it here
Rounding.Prepared preparedRounding = rounding.prepareForUnknown();
RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding);
// is specified in the builder. // is specified in the builder.
final DocValueFormat docValueFormat = format() == null ? DocValueFormat.RAW : config.format(); final DocValueFormat docValueFormat = format() == null ? DocValueFormat.RAW : config.format();
final MappedFieldType fieldType = config.fieldContext() != null ? config.fieldContext().fieldType() : null; final MappedFieldType fieldType = config.fieldContext() != null ? config.fieldContext().fieldType() : null;

@ -34,14 +34,14 @@ import java.io.IOException;
*/ */
class RoundingValuesSource extends ValuesSource.Numeric { class RoundingValuesSource extends ValuesSource.Numeric {
private final ValuesSource.Numeric vs; private final ValuesSource.Numeric vs;
private final Rounding rounding; private final Rounding.Prepared rounding;
/** /**
* *
* @param vs The original values source * @param vs The original values source
* @param rounding How to round the values * @param rounding How to round the values
*/ */
RoundingValuesSource(Numeric vs, Rounding rounding) { RoundingValuesSource(Numeric vs, Rounding.Prepared rounding) {
this.vs = vs; this.vs = vs;
this.rounding = rounding; this.rounding = rounding;
} }
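Switching this field from `Rounding` to `Rounding.Prepared` means preparation happens once per shard instead of being redone per document. A usage sketch built on the API this commit adds (the values are illustrative):

```java
import java.time.ZoneId;
import org.elasticsearch.common.Rounding;

// Sketch: prepare once per shard, round once per value. prepareForUnknown()
// is the fallback used above because composite can't yet supply min/max.
public class PreparedRoundingSketch {
    public static void main(String[] args) {
        Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY)
            .timeZone(ZoneId.of("America/New_York"))
            .build();
        Rounding.Prepared prepared = rounding.prepareForUnknown();

        long[] docValues = { 0L, 1_800_000L, 5_400_000L }; // illustrative millis
        for (long utcMillis : docValues) {
            System.out.println(utcMillis + " -> " + prepared.round(utcMillis));
        }
    }
}
```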

@ -45,33 +45,37 @@ import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Function;
/** /**
* An aggregator for date values. Every date is rounded down using a configured * An aggregator for date values that attempts to return a specific number of
* {@link Rounding}. * buckets, reconfiguring how it rounds dates to buckets on the fly as new
* * data arrives.
* @see Rounding
*/ */
class AutoDateHistogramAggregator extends DeferableBucketAggregator { class AutoDateHistogramAggregator extends DeferableBucketAggregator {
private final ValuesSource.Numeric valuesSource; private final ValuesSource.Numeric valuesSource;
private final DocValueFormat formatter; private final DocValueFormat formatter;
private final RoundingInfo[] roundingInfos; private final RoundingInfo[] roundingInfos;
private final Function<Rounding, Rounding.Prepared> roundingPreparer;
private int roundingIdx = 0; private int roundingIdx = 0;
private Rounding.Prepared preparedRounding;
private LongHash bucketOrds; private LongHash bucketOrds;
private int targetBuckets; private int targetBuckets;
private MergingBucketsDeferringCollector deferringCollector; private MergingBucketsDeferringCollector deferringCollector;
AutoDateHistogramAggregator(String name, AggregatorFactories factories, int numBuckets, RoundingInfo[] roundingInfos, AutoDateHistogramAggregator(String name, AggregatorFactories factories, int numBuckets, RoundingInfo[] roundingInfos,
@Nullable ValuesSource valuesSource, DocValueFormat formatter, SearchContext aggregationContext, Aggregator parent, Function<Rounding, Rounding.Prepared> roundingPreparer, @Nullable ValuesSource valuesSource, DocValueFormat formatter,
Map<String, Object> metadata) throws IOException { SearchContext aggregationContext, Aggregator parent, Map<String, Object> metadata) throws IOException {
super(name, factories, aggregationContext, parent, metadata); super(name, factories, aggregationContext, parent, metadata);
this.targetBuckets = numBuckets; this.targetBuckets = numBuckets;
this.valuesSource = (ValuesSource.Numeric) valuesSource; this.valuesSource = (ValuesSource.Numeric) valuesSource;
this.formatter = formatter; this.formatter = formatter;
this.roundingInfos = roundingInfos; this.roundingInfos = roundingInfos;
this.roundingPreparer = roundingPreparer;
preparedRounding = roundingPreparer.apply(roundingInfos[roundingIdx].rounding);
bucketOrds = new LongHash(1, aggregationContext.bigArrays()); bucketOrds = new LongHash(1, aggregationContext.bigArrays());
@ -113,7 +117,7 @@ class AutoDateHistogramAggregator extends DeferableBucketAggregator {
long previousRounded = Long.MIN_VALUE; long previousRounded = Long.MIN_VALUE;
for (int i = 0; i < valuesCount; ++i) { for (int i = 0; i < valuesCount; ++i) {
long value = values.nextValue(); long value = values.nextValue();
long rounded = roundingInfos[roundingIdx].rounding.round(value); long rounded = preparedRounding.round(value);
assert rounded >= previousRounded; assert rounded >= previousRounded;
if (rounded == previousRounded) { if (rounded == previousRounded) {
continue; continue;
@ -138,10 +142,10 @@ class AutoDateHistogramAggregator extends DeferableBucketAggregator {
try (LongHash oldBucketOrds = bucketOrds) { try (LongHash oldBucketOrds = bucketOrds) {
LongHash newBucketOrds = new LongHash(1, context.bigArrays()); LongHash newBucketOrds = new LongHash(1, context.bigArrays());
long[] mergeMap = new long[(int) oldBucketOrds.size()]; long[] mergeMap = new long[(int) oldBucketOrds.size()];
Rounding newRounding = roundingInfos[++roundingIdx].rounding; preparedRounding = roundingPreparer.apply(roundingInfos[++roundingIdx].rounding);
for (int i = 0; i < oldBucketOrds.size(); i++) { for (int i = 0; i < oldBucketOrds.size(); i++) {
long oldKey = oldBucketOrds.get(i); long oldKey = oldBucketOrds.get(i);
long newKey = newRounding.round(oldKey); long newKey = preparedRounding.round(oldKey);
long newBucketOrd = newBucketOrds.add(newKey); long newBucketOrd = newBucketOrds.add(newKey);
if (newBucketOrd >= 0) { if (newBucketOrd >= 0) {
mergeMap[i] = newBucketOrd; mergeMap[i] = newBucketOrd;
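The merge loop above is the heart of `auto_date_histogram`: re-round every existing key with the next coarser prepared rounding and map old ordinals onto new ones. A standalone sketch with plain collections standing in for `LongHash`:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

// Sketch: when the target bucket count is exceeded, re-round every old key
// with a coarser rounding and record old ordinal -> new ordinal.
public class BucketMergeSketch {
    public static void main(String[] args) {
        long[] oldKeys = { 0L, 3_600_000L, 7_200_000L, 86_400_000L }; // hourly keys
        long coarser = 86_400_000L; // stand-in for the next RoundingInfo (days)

        Map<Long, Integer> newOrds = new HashMap<>();
        int[] mergeMap = new int[oldKeys.length];
        for (int i = 0; i < oldKeys.length; i++) {
            long newKey = Math.floorDiv(oldKeys[i], coarser) * coarser; // preparedRounding.round stand-in
            Integer ord = newOrds.get(newKey);
            if (ord == null) {
                ord = newOrds.size();
                newOrds.put(newKey, ord);
            }
            mergeMap[i] = ord;
        }
        // The first three hours collapse into the day-0 bucket, the fourth
        // into day-1.
        System.out.println(Arrays.toString(mergeMap)); // [0, 0, 0, 1]
    }
}
```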

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram; package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
@ -76,15 +77,16 @@ public final class AutoDateHistogramAggregatorFactory extends ValuesSourceAggreg
throw new AggregationExecutionException("Registry miss-match - expected AutoDateHistogramAggregationSupplier, found [" + throw new AggregationExecutionException("Registry miss-match - expected AutoDateHistogramAggregationSupplier, found [" +
aggregatorSupplier.getClass().toString() + "]"); aggregatorSupplier.getClass().toString() + "]");
} }
return ((AutoDateHistogramAggregatorSupplier) aggregatorSupplier).build(name, factories, numBuckets, roundingInfos, valuesSource, return ((AutoDateHistogramAggregatorSupplier) aggregatorSupplier).build(name, factories, numBuckets, roundingInfos,
config.format(), searchContext, parent, metadata); // TODO once auto date histo is plugged into the ValuesSource refactoring use the date values source
Rounding::prepareForUnknown, valuesSource, config.format(), searchContext, parent, metadata);
} }
@Override @Override
protected Aggregator createUnmapped(SearchContext searchContext, protected Aggregator createUnmapped(SearchContext searchContext,
Aggregator parent, Aggregator parent,
Map<String, Object> metadata) throws IOException { Map<String, Object> metadata) throws IOException {
return new AutoDateHistogramAggregator(name, factories, numBuckets, roundingInfos, null, config.format(), searchContext, parent, return new AutoDateHistogramAggregator(name, factories, numBuckets, roundingInfos, Rounding::prepareForUnknown, null,
metadata); config.format(), searchContext, parent, metadata);
} }
} }

@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram; package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -29,6 +30,7 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import java.util.function.Function;
@FunctionalInterface @FunctionalInterface
public interface AutoDateHistogramAggregatorSupplier extends AggregatorSupplier { public interface AutoDateHistogramAggregatorSupplier extends AggregatorSupplier {
@ -37,6 +39,8 @@ public interface AutoDateHistogramAggregatorSupplier extends AggregatorSupplier
AggregatorFactories factories, AggregatorFactories factories,
int numBuckets, int numBuckets,
AutoDateHistogramAggregationBuilder.RoundingInfo[] roundingInfos, AutoDateHistogramAggregationBuilder.RoundingInfo[] roundingInfos,
@Nullable
Function<Rounding, Rounding.Prepared> roundingPreparer,
@Nullable ValuesSource valuesSource, @Nullable ValuesSource valuesSource,
DocValueFormat formatter, DocValueFormat formatter,
SearchContext aggregationContext, SearchContext aggregationContext,

@ -529,6 +529,7 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
AggregatorFactories.Builder subFactoriesBuilder) throws IOException { AggregatorFactories.Builder subFactoriesBuilder) throws IOException {
final ZoneId tz = timeZone(); final ZoneId tz = timeZone();
final Rounding rounding = dateHistogramInterval.createRounding(tz, offset); final Rounding rounding = dateHistogramInterval.createRounding(tz, offset);
// TODO once we optimize TimeIntervalRounding we won't need to rewrite the time zone
final ZoneId rewrittenTimeZone = rewriteTimeZone(queryShardContext); final ZoneId rewrittenTimeZone = rewriteTimeZone(queryShardContext);
final Rounding shardRounding; final Rounding shardRounding;
if (tz == rewrittenTimeZone) { if (tz == rewrittenTimeZone) {

@ -37,7 +37,7 @@ public interface DateHistogramAggregationSupplier extends AggregatorSupplier {
Aggregator build(String name, Aggregator build(String name,
AggregatorFactories factories, AggregatorFactories factories,
Rounding rounding, Rounding rounding,
Rounding shardRounding, Rounding.Prepared preparedRounding,
BucketOrder order, BucketOrder order,
boolean keyed, boolean keyed,
long minDocCount, long minDocCount,

@ -54,7 +54,10 @@ class DateHistogramAggregator extends BucketsAggregator {
private final ValuesSource.Numeric valuesSource; private final ValuesSource.Numeric valuesSource;
private final DocValueFormat formatter; private final DocValueFormat formatter;
private final Rounding rounding; private final Rounding rounding;
private final Rounding shardRounding; /**
* The rounding prepared for rewriting the data in the shard.
*/
private final Rounding.Prepared preparedRounding;
private final BucketOrder order; private final BucketOrder order;
private final boolean keyed; private final boolean keyed;
@ -63,21 +66,21 @@ class DateHistogramAggregator extends BucketsAggregator {
private final LongHash bucketOrds; private final LongHash bucketOrds;
DateHistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, Rounding shardRounding, DateHistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, Rounding.Prepared preparedRounding,
BucketOrder order, boolean keyed, BucketOrder order, boolean keyed,
long minDocCount, @Nullable ExtendedBounds extendedBounds, @Nullable ValuesSource.Numeric valuesSource, long minDocCount, @Nullable ExtendedBounds extendedBounds, @Nullable ValuesSource valuesSource,
DocValueFormat formatter, SearchContext aggregationContext, DocValueFormat formatter, SearchContext aggregationContext,
Aggregator parent, Map<String, Object> metadata) throws IOException { Aggregator parent, Map<String, Object> metadata) throws IOException {
super(name, factories, aggregationContext, parent, metadata); super(name, factories, aggregationContext, parent, metadata);
this.rounding = rounding; this.rounding = rounding;
this.shardRounding = shardRounding; this.preparedRounding = preparedRounding;
this.order = order; this.order = order;
order.validate(this); order.validate(this);
this.keyed = keyed; this.keyed = keyed;
this.minDocCount = minDocCount; this.minDocCount = minDocCount;
this.extendedBounds = extendedBounds; this.extendedBounds = extendedBounds;
this.valuesSource = valuesSource; this.valuesSource = (ValuesSource.Numeric) valuesSource;
this.formatter = formatter; this.formatter = formatter;
bucketOrds = new LongHash(1, aggregationContext.bigArrays()); bucketOrds = new LongHash(1, aggregationContext.bigArrays());
@ -110,7 +113,7 @@ class DateHistogramAggregator extends BucketsAggregator {
long value = values.nextValue(); long value = values.nextValue();
// We can use shardRounding here, which is sometimes more efficient // We can use shardRounding here, which is sometimes more efficient
// if daylight saving times are involved. // if daylight saving times are involved.
long rounded = shardRounding.round(value); long rounded = preparedRounding.round(value);
assert rounded >= previousRounded; assert rounded >= previousRounded;
if (rounded == previousRounded) { if (rounded == previousRounded) {
continue; continue;
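The collect loop relies on a document's values arriving sorted, so equal rounded keys are adjacent and `previousRounded` dedupes them. A minimal sketch of that pattern, with a fixed-width rounding standing in for `preparedRounding`:

```java
// Sketch: values for a doc arrive sorted; after rounding, equal keys are
// consecutive, so tracking the previous key counts each bucket once per doc.
public class CollectDedupSketch {
    public static void main(String[] args) {
        long[] sortedValues = { 1_000L, 2_000L, 4_000_000L };
        long interval = 3_600_000L;
        long previousRounded = Long.MIN_VALUE;
        for (long value : sortedValues) {
            long rounded = Math.floorDiv(value, interval) * interval;
            assert rounded >= previousRounded;
            if (rounded == previousRounded) {
                continue; // same bucket as the previous value of this doc
            }
            System.out.println("collect bucket " + rounded); // 0, then 3600000
            previousRounded = rounded;
        }
    }
}
```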

@ -19,11 +19,9 @@
package org.elasticsearch.search.aggregations.bucket.histogram; package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Rounding; import org.elasticsearch.common.Rounding;
import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.common.collect.List;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -38,54 +36,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays;
import java.util.Map; import java.util.Map;
public final class DateHistogramAggregatorFactory extends ValuesSourceAggregatorFactory { public final class DateHistogramAggregatorFactory extends ValuesSourceAggregatorFactory {
public static void registerAggregators(ValuesSourceRegistry.Builder builder) { public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
builder.register(DateHistogramAggregationBuilder.NAME, builder.register(DateHistogramAggregationBuilder.NAME,
Arrays.asList(CoreValuesSourceType.DATE, CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN), List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN),
(DateHistogramAggregationSupplier) (String name, (DateHistogramAggregationSupplier) DateHistogramAggregator::new);
AggregatorFactories factories,
Rounding rounding,
Rounding shardRounding,
BucketOrder order,
boolean keyed,
long minDocCount,
@Nullable ExtendedBounds extendedBounds,
@Nullable ValuesSource valuesSource,
DocValueFormat formatter,
SearchContext aggregationContext,
Aggregator parent,
Map<String, Object> metadata) -> new DateHistogramAggregator(name,
factories, rounding, shardRounding, order, keyed, minDocCount, extendedBounds, (ValuesSource.Numeric) valuesSource,
formatter, aggregationContext, parent, metadata));
builder.register(DateHistogramAggregationBuilder.NAME, builder.register(DateHistogramAggregationBuilder.NAME,
CoreValuesSourceType.RANGE, CoreValuesSourceType.RANGE,
(DateHistogramAggregationSupplier) (String name, (DateHistogramAggregationSupplier) DateRangeHistogramAggregator::new);
AggregatorFactories factories,
Rounding rounding,
Rounding shardRounding,
BucketOrder order,
boolean keyed,
long minDocCount,
@Nullable ExtendedBounds extendedBounds,
@Nullable ValuesSource valuesSource,
DocValueFormat formatter,
SearchContext aggregationContext,
Aggregator parent,
Map<String, Object> metadata) -> {
ValuesSource.Range rangeValueSource = (ValuesSource.Range) valuesSource;
if (rangeValueSource.rangeType() != RangeType.DATE) {
throw new IllegalArgumentException("Expected date range type but found range type [" + rangeValueSource.rangeType().name
+ "]");
}
return new DateRangeHistogramAggregator(name,
factories, rounding, shardRounding, order, keyed, minDocCount, extendedBounds, rangeValueSource, formatter,
aggregationContext, parent, metadata); });
} }
private final BucketOrder order; private final BucketOrder order;
@ -128,15 +90,17 @@ public final class DateHistogramAggregatorFactory extends ValuesSourceAggregator
throw new AggregationExecutionException("Registry miss-match - expected DateHistogramAggregationSupplier, found [" + throw new AggregationExecutionException("Registry miss-match - expected DateHistogramAggregationSupplier, found [" +
aggregatorSupplier.getClass().toString() + "]"); aggregatorSupplier.getClass().toString() + "]");
} }
return ((DateHistogramAggregationSupplier) aggregatorSupplier).build(name, factories, rounding, shardRounding, order, keyed, Rounding.Prepared preparedRounding = valuesSource.roundingPreparer(queryShardContext.getIndexReader()).apply(shardRounding);
minDocCount, extendedBounds, valuesSource, config.format(), searchContext, parent, metadata); return ((DateHistogramAggregationSupplier) aggregatorSupplier).build(name, factories, rounding, preparedRounding, order, keyed,
minDocCount, extendedBounds, valuesSource, config.format(), searchContext,
parent, metadata);
} }
@Override @Override
protected Aggregator createUnmapped(SearchContext searchContext, protected Aggregator createUnmapped(SearchContext searchContext,
Aggregator parent, Aggregator parent,
Map<String, Object> metadata) throws IOException { Map<String, Object> metadata) throws IOException {
return new DateHistogramAggregator(name, factories, rounding, shardRounding, order, keyed, minDocCount, extendedBounds, return new DateHistogramAggregator(name, factories, rounding, null, order, keyed, minDocCount, extendedBounds,
null, config.format(), searchContext, parent, metadata); null, config.format(), searchContext, parent, metadata);
} }
} }

@ -57,7 +57,10 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
private final ValuesSource.Range valuesSource; private final ValuesSource.Range valuesSource;
private final DocValueFormat formatter; private final DocValueFormat formatter;
private final Rounding rounding; private final Rounding rounding;
private final Rounding shardRounding; /**
* The rounding prepared for rewriting the data in the shard.
*/
private final Rounding.Prepared preparedRounding;
private final BucketOrder order; private final BucketOrder order;
private final boolean keyed; private final boolean keyed;
@ -66,22 +69,26 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
private final LongHash bucketOrds; private final LongHash bucketOrds;
DateRangeHistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, Rounding shardRounding, DateRangeHistogramAggregator(String name, AggregatorFactories factories, Rounding rounding, Rounding.Prepared preparedRounding,
BucketOrder order, boolean keyed, BucketOrder order, boolean keyed,
long minDocCount, @Nullable ExtendedBounds extendedBounds, @Nullable ValuesSource.Range valuesSource, long minDocCount, @Nullable ExtendedBounds extendedBounds, @Nullable ValuesSource valuesSource,
DocValueFormat formatter, SearchContext aggregationContext, DocValueFormat formatter, SearchContext aggregationContext,
Aggregator parent, Map<String, Object> metadata) throws IOException { Aggregator parent, Map<String, Object> metadata) throws IOException {
super(name, factories, aggregationContext, parent, metadata); super(name, factories, aggregationContext, parent, metadata);
this.rounding = rounding; this.rounding = rounding;
this.shardRounding = shardRounding; this.preparedRounding = preparedRounding;
this.order = order; this.order = order;
order.validate(this); order.validate(this);
this.keyed = keyed; this.keyed = keyed;
this.minDocCount = minDocCount; this.minDocCount = minDocCount;
this.extendedBounds = extendedBounds; this.extendedBounds = extendedBounds;
this.valuesSource = valuesSource; this.valuesSource = (ValuesSource.Range) valuesSource;
this.formatter = formatter; this.formatter = formatter;
if (this.valuesSource.rangeType() != RangeType.DATE) {
throw new IllegalArgumentException("Expected date range type but found range type [" + this.valuesSource.rangeType().name
+ "]");
}
bucketOrds = new LongHash(1, aggregationContext.bigArrays()); bucketOrds = new LongHash(1, aggregationContext.bigArrays());
} }
@ -122,10 +129,10 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
// The encoding should ensure that this assert is always true. // The encoding should ensure that this assert is always true.
assert from >= previousFrom : "Start of range not >= previous start"; assert from >= previousFrom : "Start of range not >= previous start";
final Long to = (Long) range.getTo(); final Long to = (Long) range.getTo();
final long startKey = shardRounding.round(from); final long startKey = preparedRounding.round(from);
final long endKey = shardRounding.round(to); final long endKey = preparedRounding.round(to);
for (long key = startKey > previousKey ? startKey : previousKey; key <= endKey; for (long key = startKey > previousKey ? startKey : previousKey; key <= endKey;
key = shardRounding.nextRoundingValue(key)) { key = preparedRounding.nextRoundingValue(key)) {
if (key == previousKey) { if (key == previousKey) {
continue; continue;
} }
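For range fields each document spans several buckets, and the loop must advance with `nextRoundingValue` because DST can make buckets unequal widths. A sketch under the simplifying assumption of fixed hour-wide buckets:

```java
import java.time.Instant;

// Sketch: a range contributes to every bucket between its rounded start and
// end; fixed-width `key += interval` stands in for nextRoundingValue here.
public class RangeBucketsSketch {
    public static void main(String[] args) {
        long from = 1_800_000L, to = 9_000_000L; // 00:30Z .. 02:30Z
        long interval = 3_600_000L;
        long previousKey = Long.MIN_VALUE;
        long startKey = Math.floorDiv(from, interval) * interval;
        long endKey = Math.floorDiv(to, interval) * interval;
        for (long key = Math.max(startKey, previousKey); key <= endKey; key += interval) {
            if (key == previousKey) {
                continue;
            }
            System.out.println("bucket " + Instant.ofEpochMilli(key)); // 00:00Z, 01:00Z, 02:00Z
            previousKey = key;
        }
    }
}
```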

@ -19,7 +19,11 @@
package org.elasticsearch.search.aggregations.support; package org.elasticsearch.search.aggregations.support;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
@ -27,17 +31,20 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.AggregationScript;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationExecutionException;
import java.io.IOException;
import java.time.ZoneId; import java.time.ZoneId;
import java.time.ZoneOffset; import java.time.ZoneOffset;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.function.Function;
import java.util.function.LongSupplier; import java.util.function.LongSupplier;
/** /**
@ -234,7 +241,51 @@ public enum CoreValuesSourceType implements ValuesSourceType {
@Override @Override
public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) { public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) {
return NUMERIC.getField(fieldContext, script); ValuesSource.Numeric dataSource = fieldData(fieldContext);
if (script != null) {
// Value script case
return new ValuesSource.Numeric.WithScript(dataSource, script);
}
return dataSource;
}
private ValuesSource.Numeric fieldData(FieldContext fieldContext) {
if ((fieldContext.indexFieldData() instanceof IndexNumericFieldData) == false) {
throw new IllegalArgumentException("Expected numeric type on field [" + fieldContext.field() +
"], but got [" + fieldContext.fieldType().typeName() + "]");
}
if (fieldContext.fieldType().indexOptions() == IndexOptions.NONE
|| fieldContext.fieldType() instanceof DateFieldType == false) {
/*
* We can't implement roundingPreparer in these cases because
* we can't look up the min and max date without both the
* search index (the first test) and the resolution which is
* on the DateFieldType.
*/
return new ValuesSource.Numeric.FieldData((IndexNumericFieldData) fieldContext.indexFieldData());
}
return new ValuesSource.Numeric.FieldData((IndexNumericFieldData) fieldContext.indexFieldData()) {
/**
* Proper dates get a real implementation of
* {@link #roundingPreparer(IndexReader)}. If the field is
* configured with a script or a missing value then we'll
* wrap this without delegating so those fields will ignore
* this implementation. Which is correct.
*/
@Override
public Function<Rounding, Rounding.Prepared> roundingPreparer(IndexReader reader) throws IOException {
DateFieldType dft = (DateFieldType) fieldContext.fieldType();
byte[] min = PointValues.getMinPackedValue(reader, fieldContext.field());
if (min == null) {
// There aren't any indexed values so we don't need to optimize.
return Rounding::prepareForUnknown;
}
byte[] max = PointValues.getMaxPackedValue(reader, fieldContext.field());
long minUtcMillis = dft.resolution().parsePointAsMillis(min);
long maxUtcMillis = dft.resolution().parsePointAsMillis(max);
return rounding -> rounding.prepare(minUtcMillis, maxUtcMillis);
}
};
} }
@Override @Override
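The payoff of the block above is that Lucene's indexed points hand over the shard-wide min and max for free, which is all `Rounding.prepare` needs. A compile-only sketch of the same lookup outside the aggregation framework, assuming the field is indexed as a single-dimension long point (millisecond resolution):

```java
import java.io.IOException;
import java.util.function.Function;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Rounding;

// Sketch: derive a rounding preparer from the index's packed point values.
// Assumes millis stored via LongPoint; nanosecond-resolution dates would
// need the field type's resolution, as the real code uses.
public class PointBoundsPreparerSketch {
    static Function<Rounding, Rounding.Prepared> preparerFor(IndexReader reader, String field) throws IOException {
        byte[] min = PointValues.getMinPackedValue(reader, field);
        if (min == null) {
            return Rounding::prepareForUnknown; // no indexed values on this shard
        }
        byte[] max = PointValues.getMaxPackedValue(reader, field);
        long minUtcMillis = NumericUtils.sortableBytesToLong(min, 0);
        long maxUtcMillis = NumericUtils.sortableBytesToLong(max, 0);
        return rounding -> rounding.prepare(minUtcMillis, maxUtcMillis);
    }
}
```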

@ -23,7 +23,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
/** /**
* Used by all field data based aggregators. This determine the context of the field data the aggregators are operating * Used by all field data based aggregators. This determine the context of the field data the aggregators are operating
* in. I holds both the field names and the index field datas that are associated with them. * in. It holds both the field names and the index field datas that are associated with them.
*/ */
public class FieldContext { public class FieldContext {

@ -28,15 +28,17 @@ import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorable; import org.apache.lucene.search.Scorable;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.Rounding.Prepared;
import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues;
import org.elasticsearch.index.fielddata.LeafOrdinalsFieldData;
import org.elasticsearch.index.fielddata.DocValueBits; import org.elasticsearch.index.fielddata.DocValueBits;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.LeafOrdinalsFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
@ -44,12 +46,14 @@ import org.elasticsearch.index.fielddata.SortingBinaryDocValues;
import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues;
import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.AggregationScript;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.WithScript.BytesValues; import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.WithScript.BytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues; import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues; import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import java.io.IOException; import java.io.IOException;
import java.util.function.Function;
import java.util.function.LongUnaryOperator; import java.util.function.LongUnaryOperator;
public abstract class ValuesSource { public abstract class ValuesSource {
@ -66,6 +70,14 @@ public abstract class ValuesSource {
return false; return false;
} }
/**
* Build a function that prepares rounding values to be called many times.
* <p>
* This returns a {@linkplain Function} because auto date histogram will
* need to call it many times over the course of running the aggregation.
*/
public abstract Function<Rounding, Rounding.Prepared> roundingPreparer(IndexReader reader) throws IOException;
public static class Range extends ValuesSource { public static class Range extends ValuesSource {
private final RangeType rangeType; private final RangeType rangeType;
protected final IndexFieldData<?> indexFieldData; protected final IndexFieldData<?> indexFieldData;
@ -86,6 +98,12 @@ public abstract class ValuesSource {
return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes); return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes);
} }
@Override
public Function<Rounding, Prepared> roundingPreparer(IndexReader reader) throws IOException {
// TODO lookup the min and max rounding when appropriate
return Rounding::prepareForUnknown;
}
public RangeType rangeType() { return rangeType; } public RangeType rangeType() { return rangeType; }
} }
public abstract static class Bytes extends ValuesSource { public abstract static class Bytes extends ValuesSource {
@ -96,6 +114,11 @@ public abstract class ValuesSource {
return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes); return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes);
} }
@Override
public final Function<Rounding, Rounding.Prepared> roundingPreparer(IndexReader reader) throws IOException {
throw new AggregationExecutionException("can't round a [BYTES]");
}
public abstract static class WithOrdinals extends Bytes { public abstract static class WithOrdinals extends Bytes {
public static final WithOrdinals EMPTY = new WithOrdinals() { public static final WithOrdinals EMPTY = new WithOrdinals() {
@ -351,6 +374,11 @@ public abstract class ValuesSource {
} }
} }
@Override
public Function<Rounding, Prepared> roundingPreparer(IndexReader reader) throws IOException {
return Rounding::prepareForUnknown;
}
/** /**
* {@link ValuesSource} subclass for Numeric fields with a Value Script applied * {@link ValuesSource} subclass for Numeric fields with a Value Script applied
*/ */
@ -543,6 +571,11 @@ public abstract class ValuesSource {
return org.elasticsearch.index.fielddata.FieldData.docsWithValue(geoPoints); return org.elasticsearch.index.fielddata.FieldData.docsWithValue(geoPoints);
} }
@Override
public final Function<Rounding, Rounding.Prepared> roundingPreparer(IndexReader reader) throws IOException {
throw new AggregationExecutionException("can't round a [GEO_POINT]");
}
public abstract MultiGeoPointValues geoPointValues(LeafReaderContext context); public abstract MultiGeoPointValues geoPointValues(LeafReaderContext context);
public static class Fielddata extends GeoPoint { public static class Fielddata extends GeoPoint {

@ -0,0 +1,380 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import org.elasticsearch.common.LocalTimeOffset.Gap;
import org.elasticsearch.common.LocalTimeOffset.Overlap;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.test.ESTestCase;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.zone.ZoneOffsetTransition;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
public class LocalTimeOffsetTests extends ESTestCase {
public void testRangeTooLarge() {
ZoneId zone = ZoneId.of("America/New_York");
assertThat(LocalTimeOffset.lookup(zone, Long.MIN_VALUE, Long.MAX_VALUE), nullValue());
}
public void testNotFixed() {
ZoneId zone = ZoneId.of("America/New_York");
assertThat(LocalTimeOffset.lookupFixedOffset(zone), nullValue());
}
public void testUtc() {
assertFixOffset(ZoneId.of("UTC"), 0);
}
public void testFixedOffset() {
ZoneOffset zone = ZoneOffset.ofTotalSeconds(between((int) -TimeUnit.HOURS.toSeconds(18), (int) TimeUnit.HOURS.toSeconds(18)));
assertFixOffset(zone, zone.getTotalSeconds() * 1000);
}
private void assertFixOffset(ZoneId zone, long offsetMillis) {
LocalTimeOffset fixed = LocalTimeOffset.lookupFixedOffset(zone);
assertThat(fixed, notNullValue());
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(zone, Long.MIN_VALUE, Long.MAX_VALUE);
assertThat(lookup.size(), equalTo(1));
long min = randomLong();
long max = randomValueOtherThan(min, ESTestCase::randomLong);
if (min > max) {
long s = min;
min = max;
max = s;
}
LocalTimeOffset fixedInRange = lookup.fixedInRange(min, max);
assertThat(fixedInRange, notNullValue());
assertRoundingAtOffset(randomBoolean() ? fixed : fixedInRange, randomLong(), offsetMillis);
}
private void assertRoundingAtOffset(LocalTimeOffset offset, long time, long offsetMillis) {
assertThat(offset.utcToLocalTime(time), equalTo(time + offsetMillis));
assertThat(offset.localToUtcInThisOffset(time + offsetMillis), equalTo(time));
assertThat(offset.localToUtc(time + offsetMillis, unusedStrategy()), equalTo(time));
}
public void testJustTransitions() {
ZoneId zone = ZoneId.of("America/New_York");
long min = time("1980-01-01", zone);
long max = time("1981-01-01", zone) - 1;
assertThat(Instant.ofEpochMilli(max), lessThan(lastTransitionIn(zone).getInstant()));
assertTransitions(zone, min, max, time("1980-06-01", zone), min + hours(1), 3, hours(-5), hours(-4));
}
public void testTransitionsWithTransitionsAndRules() {
ZoneId zone = ZoneId.of("America/New_York");
long min = time("1980-01-01", zone);
long max = time("2021-01-01", zone) - 1;
assertThat(Instant.ofEpochMilli(min), lessThan(lastTransitionIn(zone).getInstant()));
assertThat(Instant.ofEpochMilli(max), greaterThan(lastTransitionIn(zone).getInstant()));
assertTransitions(zone, min, max, time("2000-06-01", zone), min + hours(1), 83, hours(-5), hours(-4));
assertThat(LocalTimeOffset.lookup(zone, min, max).fixedInRange(utcTime("2000-06-01"), utcTime("2000-06-02")), notNullValue());
}
public void testAfterRules() {
ZoneId zone = ZoneId.of("America/New_York");
long min = time("2020-01-01", zone);
long max = time("2021-01-01", zone) - 1;
assertThat(Instant.ofEpochMilli(min), greaterThan(lastTransitionIn(zone).getInstant()));
assertTransitions(zone, min, max, time("2020-06-01", zone), min + hours(1), 3, hours(-5), hours(-4));
}
private void assertTransitions(ZoneId zone, long min, long max, long between, long sameOffsetAsMin,
int size, long minMaxOffset, long betweenOffset) {
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(zone, min, max);
assertThat(lookup.size(), equalTo(size));
assertRoundingAtOffset(lookup.lookup(min), min, minMaxOffset);
assertRoundingAtOffset(lookup.lookup(between), between, betweenOffset);
assertRoundingAtOffset(lookup.lookup(max), max, minMaxOffset);
assertThat(lookup.fixedInRange(min, max), nullValue());
assertThat(lookup.fixedInRange(min, sameOffsetAsMin), sameInstance(lookup.lookup(min)));
}
// Some sanity checks for when you pass a single time. We don't expect to do this much but it shouldn't be totally borked.
public void testSingleTimeBeforeRules() {
ZoneId zone = ZoneId.of("America/New_York");
long time = time("1980-01-01", zone);
assertThat(Instant.ofEpochMilli(time), lessThan(lastTransitionIn(zone).getInstant()));
assertRoundingAtOffset(LocalTimeOffset.lookup(zone, time, time).lookup(time), time, hours(-5));
}
public void testSingleTimeAfterRules() {
ZoneId zone = ZoneId.of("America/New_York");
long time = time("2020-01-01", zone);
assertThat(Instant.ofEpochMilli(time), greaterThan(lastTransitionIn(zone).getInstant()));
assertRoundingAtOffset(LocalTimeOffset.lookup(zone, time, time).lookup(time), time, hours(-5));
}
public void testJustOneRuleApplies() {
ZoneId zone = ZoneId.of("Atlantic/Azores");
long time = time("2000-10-30T00:00:00", zone);
assertRoundingAtOffset(LocalTimeOffset.lookup(zone, time, time).lookup(time), time, hours(-1));
}
public void testLastTransitionWithoutRules() {
/*
* Asia/Kathmandu turned their clocks 15 minutes forward at
* 1986-01-01T00:00:00 local time and hasn't changed time since.
* This has broken the transition collection code in the past.
*/
ZoneId zone = ZoneId.of("Asia/Kathmandu");
long time = time("1986-01-01T00:00:00", zone);
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(zone, time - 1, time);
assertThat(lookup.size(), equalTo(2));
assertRoundingAtOffset(lookup.lookup(time - 1), time - 1, TimeUnit.MINUTES.toMillis(330));
assertRoundingAtOffset(lookup.lookup(time), time, TimeUnit.MINUTES.toMillis(345));
}
public void testOverlap() {
/*
* Europe/Rome turned their clocks back an hour in 1978, which is totally
* normal, but they rolled back past midnight, which is pretty rare and neat.
*/
ZoneId tz = ZoneId.of("Europe/Rome");
long overlapMillis = TimeUnit.HOURS.toMillis(1);
long firstMidnight = utcTime("1978-09-30T22:00:00");
long secondMidnight = utcTime("1978-09-30T23:00:00");
long overlapEnds = utcTime("1978-10-01T00:00:00");
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(tz, firstMidnight, overlapEnds);
LocalTimeOffset secondMidnightOffset = lookup.lookup(secondMidnight);
long localSecondMidnight = secondMidnightOffset.utcToLocalTime(secondMidnight);
LocalTimeOffset firstMidnightOffset = lookup.lookup(firstMidnight);
long localFirstMidnight = firstMidnightOffset.utcToLocalTime(firstMidnight);
assertThat(localSecondMidnight - localFirstMidnight, equalTo(0L));
assertThat(lookup.lookup(overlapEnds), sameInstance(secondMidnightOffset));
long localOverlapEnds = secondMidnightOffset.utcToLocalTime(overlapEnds);
assertThat(localOverlapEnds - localSecondMidnight, equalTo(overlapMillis));
long localOverlappingTime = randomLongBetween(localFirstMidnight, localOverlapEnds);
assertThat(firstMidnightOffset.localToUtcInThisOffset(localFirstMidnight - 1), equalTo(firstMidnight - 1));
assertThat(secondMidnightOffset.localToUtcInThisOffset(localFirstMidnight - 1), equalTo(secondMidnight - 1));
assertThat(firstMidnightOffset.localToUtcInThisOffset(localFirstMidnight), equalTo(firstMidnight));
assertThat(secondMidnightOffset.localToUtcInThisOffset(localFirstMidnight), equalTo(secondMidnight));
assertThat(secondMidnightOffset.localToUtcInThisOffset(localOverlapEnds), equalTo(overlapEnds));
assertThat(secondMidnightOffset.localToUtcInThisOffset(localOverlappingTime),
equalTo(firstMidnightOffset.localToUtcInThisOffset(localOverlappingTime) + overlapMillis));
long beforeOverlapValue = randomLong();
assertThat(secondMidnightOffset.localToUtc(localFirstMidnight - 1, useValueForBeforeOverlap(beforeOverlapValue)),
equalTo(beforeOverlapValue));
long overlapValue = randomLong();
assertThat(secondMidnightOffset.localToUtc(localFirstMidnight, useValueForOverlap(overlapValue)), equalTo(overlapValue));
assertThat(secondMidnightOffset.localToUtc(localOverlapEnds, unusedStrategy()), equalTo(overlapEnds));
assertThat(secondMidnightOffset.localToUtc(localOverlappingTime, useValueForOverlap(overlapValue)), equalTo(overlapValue));
}
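The Rome overlap this test walks through is easy to reproduce with plain java.time, against the same tzdata: the rolled-back local times have two valid offsets.

```java
import java.time.LocalDateTime;
import java.time.ZoneId;

// Sketch: the hour after local midnight on 1978-10-01 happened twice in
// Europe/Rome, so those local times resolve to two valid offsets.
public class RomeOverlapSketch {
    public static void main(String[] args) {
        ZoneId rome = ZoneId.of("Europe/Rome");
        LocalDateTime ambiguous = LocalDateTime.of(1978, 10, 1, 0, 30);
        System.out.println(rome.getRules().getValidOffsets(ambiguous)); // [+02:00, +01:00]
    }
}
```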
public void testGap() {
/*
* Asia/Kathmandu turned their clocks 15 minutes forward at
* 1986-01-01T00:00:00, creating a really "fun" gap.
*/
ZoneId tz = ZoneId.of("Asia/Kathmandu");
long gapLength = TimeUnit.MINUTES.toMillis(15);
long transition = time("1986-01-01T00:00:00", tz);
LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(tz, transition - 1, transition);
LocalTimeOffset gapOffset = lookup.lookup(transition);
long localAtTransition = gapOffset.utcToLocalTime(transition);
LocalTimeOffset beforeGapOffset = lookup.lookup(transition - 1);
long localBeforeTransition = beforeGapOffset.utcToLocalTime(transition - 1);
assertThat(localAtTransition - localBeforeTransition, equalTo(gapLength + 1));
assertThat(beforeGapOffset.localToUtcInThisOffset(localBeforeTransition), equalTo(transition - 1));
assertThat(gapOffset.localToUtcInThisOffset(localBeforeTransition), equalTo(transition - 1 - gapLength));
assertThat(gapOffset.localToUtcInThisOffset(localAtTransition), equalTo(transition));
long beforeGapValue = randomLong();
assertThat(gapOffset.localToUtc(localBeforeTransition, useValueForBeforeGap(beforeGapValue)), equalTo(beforeGapValue));
assertThat(gapOffset.localToUtc(localAtTransition, unusedStrategy()), equalTo(transition));
long gapValue = randomLong();
long localSkippedTime = randomLongBetween(localBeforeTransition, localAtTransition);
assertThat(gapOffset.localToUtc(localSkippedTime, useValueForGap(gapValue)), equalTo(gapValue));
}
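And the Kathmandu gap the same way: the skipped quarter hour has no valid offsets, and `getTransition` describes the gap.

```java
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.zone.ZoneOffsetTransition;

// Sketch: 1986-01-01T00:05 local never existed in Asia/Kathmandu because
// the clocks jumped from +05:30 straight to +05:45 at midnight.
public class KathmanduGapSketch {
    public static void main(String[] args) {
        ZoneId kathmandu = ZoneId.of("Asia/Kathmandu");
        LocalDateTime skipped = LocalDateTime.of(1986, 1, 1, 0, 5);
        assert kathmandu.getRules().getValidOffsets(skipped).isEmpty();
        ZoneOffsetTransition gap = kathmandu.getRules().getTransition(skipped);
        System.out.println(gap); // e.g. Transition[Gap at 1986-01-01T00:00+05:30 to +05:45]
    }
}
```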
private static long utcTime(String time) {
return DateFormatter.forPattern("date_optional_time").parseMillis(time);
}
private static long time(String time, ZoneId zone) {
return DateFormatter.forPattern("date_optional_time").withZone(zone).parseMillis(time);
}
/**
* Returns the last "fully defined" transition in the provided {@linkplain ZoneId}.
*/
private static ZoneOffsetTransition lastTransitionIn(ZoneId zone) {
List<ZoneOffsetTransition> transitions = zone.getRules().getTransitions();
return transitions.get(transitions.size() - 1);
}
private static LocalTimeOffset.Strategy unusedStrategy() {
return new LocalTimeOffset.Strategy() {
@Override
public long inGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
};
}
private static LocalTimeOffset.Strategy useValueForGap(long gapValue) {
return new LocalTimeOffset.Strategy() {
@Override
public long inGap(long localMillis, Gap gap) {
return gapValue;
}
@Override
public long beforeGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
};
}
private static LocalTimeOffset.Strategy useValueForBeforeGap(long beforeGapValue) {
return new LocalTimeOffset.Strategy() {
@Override
public long inGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeGap(long localMillis, Gap gap) {
return beforeGapValue;
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
};
}
private static LocalTimeOffset.Strategy useValueForOverlap(long overlapValue) {
return new LocalTimeOffset.Strategy() {
@Override
public long inGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
return overlapValue;
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
};
}
private static LocalTimeOffset.Strategy useValueForBeforeOverlap(long beforeOverlapValue) {
return new LocalTimeOffset.Strategy() {
@Override
public long inGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeGap(long localMillis, Gap gap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long inOverlap(long localMillis, Overlap overlap) {
fail("Shouldn't be called");
return 0;
}
@Override
public long beforeOverlap(long localMillis, Overlap overlap) {
return beforeOverlapValue;
}
};
}
private static long hours(long hours) {
return TimeUnit.HOURS.toMillis(hours);
}
}
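Taken together, these tests pin down the new class's contract. A hedged usage sketch of the `LocalTimeOffset` API they exercise:

```java
import java.time.ZoneId;
import org.elasticsearch.common.LocalTimeOffset;

// Sketch of the API exercised above: build a lookup for a utc range,
// resolve the offset in force at an instant, and convert both directions.
public class LocalTimeOffsetSketch {
    public static void main(String[] args) {
        ZoneId zone = ZoneId.of("America/New_York");
        long min = 0L, max = 31_536_000_000L; // all of 1970
        LocalTimeOffset.Lookup lookup = LocalTimeOffset.lookup(zone, min, max);

        long utcMillis = 1_000_000L;
        LocalTimeOffset offset = lookup.lookup(utcMillis); // offset in force then
        long localMillis = offset.utcToLocalTime(utcMillis);
        System.out.println(offset.localToUtcInThisOffset(localMillis)); // 1000000

        // One fixed offset covers the range only if no transition falls in
        // it; New York had DST in 1970, so this is null and the fast
        // fixed-offset rounding path can't be used.
        System.out.println(lookup.fixedInRange(min, max)); // null
    }
}
```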

@ -226,19 +226,23 @@ public class RoundingTests extends ESTestCase {
* described in * described in
* {@link #assertInterval(long, long, long, Rounding, ZoneId)} * {@link #assertInterval(long, long, long, Rounding, ZoneId)}
*/ */
public void testRoundingRandom() { public void testRandomTimeUnitRounding() {
for (int i = 0; i < 1000; ++i) { for (int i = 0; i < 1000; ++i) {
Rounding.DateTimeUnit unit = randomFrom(Rounding.DateTimeUnit.values()); Rounding.DateTimeUnit unit = randomFrom(Rounding.DateTimeUnit.values());
ZoneId tz = randomZone(); ZoneId tz = randomZone();
Rounding rounding = new Rounding.TimeUnitRounding(unit, tz); Rounding rounding = new Rounding.TimeUnitRounding(unit, tz);
long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 long[] bounds = randomDateBounds();
Rounding.Prepared prepared = rounding.prepare(bounds[0], bounds[1]);
// Check that rounding is internally consistent and consistent with nextRoundingValue
long date = dateBetween(bounds[0], bounds[1]);
long unitMillis = unit.getField().getBaseUnit().getDuration().toMillis(); long unitMillis = unit.getField().getBaseUnit().getDuration().toMillis();
// FIXME this was copy pasted from the other impl and the result is never used, so the nasty date never actually gets assigned // FIXME this was copy pasted from the other impl and the result is never used, so the nasty date never actually gets assigned
if (randomBoolean()) { if (randomBoolean()) {
nastyDate(date, tz, unitMillis); nastyDate(date, tz, unitMillis);
} }
final long roundedDate = rounding.round(date); final long roundedDate = prepared.round(date);
final long nextRoundingValue = rounding.nextRoundingValue(roundedDate); final long nextRoundingValue = prepared.nextRoundingValue(roundedDate);
assertInterval(roundedDate, date, nextRoundingValue, rounding, tz); assertInterval(roundedDate, date, nextRoundingValue, rounding, tz);
@ -252,6 +256,26 @@ public class RoundingTests extends ESTestCase {
                        + Instant.ofEpochMilli(roundedDate), nextRoundingValue - roundedDate, equalTo(unitMillis));
                }
            }

            // Round a whole bunch of dates and make sure they line up with the known good java time implementation
            Rounding.Prepared javaTimeRounding = rounding.prepareJavaTime();
            for (int d = 0; d < 1000; d++) {
                date = dateBetween(bounds[0], bounds[1]);
                long javaRounded = javaTimeRounding.round(date);
                long esRounded = prepared.round(date);
                if (javaRounded != esRounded) {
                    fail("Expected [" + rounding + "] to round [" + Instant.ofEpochMilli(date) + "] to ["
                        + Instant.ofEpochMilli(javaRounded) + "] but instead rounded to [" + Instant.ofEpochMilli(esRounded) + "]");
                }
                long javaNextRoundingValue = javaTimeRounding.nextRoundingValue(date);
                long esNextRoundingValue = prepared.nextRoundingValue(date);
                if (javaNextRoundingValue != esNextRoundingValue) {
                    fail("Expected [" + rounding + "] to round [" + Instant.ofEpochMilli(date) + "] to ["
                        + Instant.ofEpochMilli(esRounded) + "] and nextRoundingValue to be ["
                        + Instant.ofEpochMilli(javaNextRoundingValue) + "] but instead was to ["
                        + Instant.ofEpochMilli(esNextRoundingValue) + "]");
                }
            }
        }
    }
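    // The two-phase API the test above exercises, in miniature (a sketch using
    // names from this change; minUtcMillis, maxUtcMillis and docTimestamp are
    // placeholders): prepare(min, max) pays the time zone lookup cost once and
    // the returned Prepared then rounds cheaply for dates inside that range.
    //
    //   Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH)
    //       .timeZone(ZoneId.of("America/New_York"))
    //       .build();
    //   Rounding.Prepared prepared = rounding.prepare(minUtcMillis, maxUtcMillis);
    //   long bucketStart = prepared.round(docTimestamp);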
@ -361,10 +385,7 @@ public class RoundingTests extends ESTestCase {
        assertThat(rounding.round(time("2016-03-28T13:00:00+02:00")), isDate(time("2016-03-28T12:00:00+02:00"), tz));
    }

    public void testRandomTimeIntervalRounding() {
        for (int i = 0; i < 1000; i++) {
            TimeUnit unit = randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS);
            long interval = unit.toMillis(randomIntBetween(1, 365));
@ -703,6 +724,60 @@ public class RoundingTests extends ESTestCase {
        assertInterval(midnightAfterTransition, nextMidnight, rounding, 24 * 60, tz);
    }

public void testBeforeOverlapLarge() {
        // Moncton has a perfectly normal hour-long Daylight Savings time transition.
ZoneId tz = ZoneId.of("America/Moncton");
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build();
assertThat(rounding.round(time("2003-10-26T03:43:35.079Z")), isDate(time("2003-10-26T03:00:00Z"), tz));
}
public void testBeforeOverlapSmall() {
/*
         * Lord Howe is fun because its Daylight Savings time shift is only 30
         * minutes, so we round HOUR_OF_DAY differently there.
*/
ZoneId tz = ZoneId.of("Australia/Lord_Howe");
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build();
assertThat(rounding.round(time("2018-03-31T15:25:15.148Z")), isDate(time("2018-03-31T14:00:00Z"), tz));
}
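    // Illustrative only: plain java.time shows the half-hour shift the test
    // above relies on. The first transition after 2018-01-01 moves the offset
    // from +11:00 to +10:30:
    //
    //   ZoneRules rules = ZoneId.of("Australia/Lord_Howe").getRules();
    //   ZoneOffsetTransition t = rules.nextTransition(Instant.parse("2018-01-01T00:00:00Z"));
    //   assert t.getOffsetBefore().equals(ZoneOffset.ofHoursMinutes(11, 0));
    //   assert t.getOffsetAfter().equals(ZoneOffset.ofHoursMinutes(10, 30));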
public void testQuarterOfYear() {
/*
* If we're not careful with how we look up local time offsets we can
* end up not loading the offsets far enough back to round this time
* to QUARTER_OF_YEAR properly.
*/
ZoneId tz = ZoneId.of("Asia/Baghdad");
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).timeZone(tz).build();
assertThat(rounding.round(time("2006-12-31T13:21:44.308Z")), isDate(time("2006-09-30T20:00:00Z"), tz));
}
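    // Illustrative only: quarter rounding reaches months back in local time,
    // which is why the offset lookup has to load history from well before the
    // value being rounded:
    //
    //   ZonedDateTime in = Instant.parse("2006-12-31T13:21:44.308Z").atZone(ZoneId.of("Asia/Baghdad"));
    //   ZonedDateTime quarterStart = in.with(IsoFields.DAY_OF_QUARTER, 1).truncatedTo(ChronoUnit.DAYS);
    //   // quarterStart is 2006-10-01T00:00+04:00, i.e. 2006-09-30T20:00:00Z, matching the assertion above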
    /**
     * A rounding prepared over a very long range should agree with the
     * unprepared rounding, here for a unit that rounds to midnight.
     */
    public void testPrepareLongRangeRoundsToMidnight() {
ZoneId tz = ZoneId.of("America/New_York");
long min = time("01980-01-01T00:00:00Z");
long max = time("10000-01-01T00:00:00Z");
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.QUARTER_OF_YEAR).timeZone(tz).build();
assertThat(rounding.round(time("2006-12-31T13:21:44.308Z")), isDate(time("2006-10-01T04:00:00Z"), tz));
assertThat(rounding.round(time("9000-12-31T13:21:44.308Z")), isDate(time("9000-10-01T04:00:00Z"), tz));
Rounding.Prepared prepared = rounding.prepare(min, max);
assertThat(prepared.round(time("2006-12-31T13:21:44.308Z")), isDate(time("2006-10-01T04:00:00Z"), tz));
assertThat(prepared.round(time("9000-12-31T13:21:44.308Z")), isDate(time("9000-10-01T04:00:00Z"), tz));
}
    /**
     * A rounding prepared over a very long range should agree with the
     * unprepared rounding, here for a unit that doesn't round to midnight.
     */
    public void testPrepareLongRangeRoundsNotToMidnight() {
ZoneId tz = ZoneId.of("Australia/Lord_Howe");
long min = time("01980-01-01T00:00:00Z");
long max = time("10000-01-01T00:00:00Z");
Rounding rounding = Rounding.builder(Rounding.DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build();
assertThat(rounding.round(time("2018-03-31T15:25:15.148Z")), isDate(time("2018-03-31T14:00:00Z"), tz));
assertThat(rounding.round(time("9000-03-31T15:25:15.148Z")), isDate(time("9000-03-31T15:00:00Z"), tz));
Rounding.Prepared prepared = rounding.prepare(min, max);
assertThat(prepared.round(time("2018-03-31T15:25:15.148Z")), isDate(time("2018-03-31T14:00:00Z"), tz));
assertThat(prepared.round(time("9000-03-31T15:25:15.148Z")), isDate(time("9000-03-31T15:00:00Z"), tz));
}
    private void assertInterval(long rounded, long nextRoundingValue, Rounding rounding, int minutes,
                                ZoneId tz) {
        assertInterval(rounded, dateBetween(rounded, nextRoundingValue), nextRoundingValue, rounding, tz);
@ -718,9 +793,9 @@ public class RoundingTests extends ESTestCase {
     * @param rounding the rounding instance
     */
    private void assertInterval(long rounded, long unrounded, long nextRoundingValue, Rounding rounding, ZoneId tz) {
        assertThat("rounding should be idempotent", rounding.round(rounded), isDate(rounded, tz));
        assertThat("rounded value smaller or equal than unrounded", rounded, lessThanOrEqualTo(unrounded));
        assertThat("values less than rounded should round further down", rounding.round(rounded - 1), lessThan(rounded));
        assertThat("nextRounding value should be a rounded date", rounding.round(nextRoundingValue), isDate(nextRoundingValue, tz));
        assertThat("values above nextRounding should round down there", rounding.round(nextRoundingValue + 1),
            isDate(nextRoundingValue, tz));
@ -762,6 +837,18 @@ public class RoundingTests extends ESTestCase {
        return true;
    }
private static long randomDate() {
return Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
}
private static long[] randomDateBounds() {
long b1 = randomDate();
long b2 = randomValueOtherThan(b1, RoundingTests::randomDate);
if (b1 < b2) {
return new long[] {b1, b2};
}
return new long[] {b2, b1};
}
    private static long dateBetween(long lower, long upper) {
        long dateBetween = randomLongBetween(lower, upper - 1);
        assert lower <= dateBetween && dateBetween < upper;

View File

@ -23,6 +23,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
@ -49,8 +50,17 @@ import static org.hamcrest.Matchers.equalTo;
public class DateHistogramAggregatorTests extends AggregatorTestCase {
    /**
     * A date that is always "aggregable" because it has doc values but may or
     * may not have a search index. If it doesn't then we can't use our fancy
     * date rounding mechanism that needs to know the minimum and maximum dates
     * it is going to round because it reads *that* out of the search index.
     */
    private static final String AGGREGABLE_DATE = "aggregable_date";
    /**
     * A date that is always "searchable" because it is indexed.
     */
    private static final String SEARCHABLE_DATE = "searchable_date";
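    /*
     * Illustrative sketch of how a rounding preparer can learn those minimum
     * and maximum dates, assuming the field is indexed as a LongPoint (plain
     * Lucene APIs; the helper name is hypothetical):
     *
     *   static long[] indexedDateRange(IndexReader reader, String field) throws IOException {
     *       byte[] min = PointValues.getMinPackedValue(reader, field);
     *       byte[] max = PointValues.getMaxPackedValue(reader, field);
     *       if (min == null || max == null) {
     *           return null; // no points for this field; the rounding can't be prepared
     *       }
     *       return new long[] { LongPoint.decodeDimension(min, 0), LongPoint.decodeDimension(max, 0) };
     *   }
     */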
    private static final List<String> dataset = Arrays.asList(
"2010-03-12T01:07:45", "2010-03-12T01:07:45",
@ -66,7 +76,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    public void testMatchNoDocsDeprecatedInterval() throws IOException {
        testBothCases(new MatchNoDocsQuery(), dataset,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> {
                assertEquals(0, histogram.getBuckets().size());
                assertFalse(AggregationInspectionHelper.hasValue(histogram));
@ -77,11 +87,11 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    public void testMatchNoDocs() throws IOException {
        testBothCases(new MatchNoDocsQuery(), dataset,
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> assertEquals(0, histogram.getBuckets().size()), false
        );
        testBothCases(new MatchNoDocsQuery(), dataset,
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE),
            histogram -> assertEquals(0, histogram.getBuckets().size()), false
        );
    }
@ -90,21 +100,21 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        Query query = new MatchAllDocsQuery();

        testSearchCase(query, dataset,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> {
                assertEquals(6, histogram.getBuckets().size());
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
            }, false
        );
        testSearchAndReduceCase(query, dataset,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> {
                assertEquals(8, histogram.getBuckets().size());
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
            }, false
        );
        testBothCases(query, dataset,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L),
            histogram -> {
                assertEquals(6, histogram.getBuckets().size());
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
@ -121,33 +131,34 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            foo.add(dataset.get(randomIntBetween(0, dataset.size()-1)));
        }
        testSearchAndReduceCase(query, foo,
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d"))
                .field(AGGREGABLE_DATE).order(BucketOrder.count(false)),
            histogram -> assertEquals(8, histogram.getBuckets().size()), false
        );

        testSearchCase(query, dataset,
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> assertEquals(6, histogram.getBuckets().size()), false
        );
        testSearchAndReduceCase(query, dataset,
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> assertEquals(8, histogram.getBuckets().size()), false
        );
        testBothCases(query, dataset,
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L),
            histogram -> assertEquals(6, histogram.getBuckets().size()), false
        );

        testSearchCase(query, dataset,
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE),
            histogram -> assertEquals(6, histogram.getBuckets().size()), false
        );
        testSearchAndReduceCase(query, dataset,
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE),
            histogram -> assertEquals(8, histogram.getBuckets().size()), false
        );
        testBothCases(query, dataset,
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE).minDocCount(1L),
            histogram -> assertEquals(6, histogram.getBuckets().size()), false
        );
    }
@ -156,7 +167,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        Query query = new MatchNoDocsQuery();
        List<String> dates = Collections.emptyList();
        Consumer<DateHistogramAggregationBuilder> aggregation =
            agg -> agg.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE);

        testSearchCase(query, dates, aggregation, histogram -> {
            assertEquals(0, histogram.getBuckets().size());
@ -173,7 +184,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        Query query = new MatchNoDocsQuery();
        List<String> dates = Collections.emptyList();
        Consumer<DateHistogramAggregationBuilder> aggregation = agg ->
            agg.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE);
        testSearchCase(query, dates, aggregation,
            histogram -> assertEquals(0, histogram.getBuckets().size()), false
        );
@ -182,7 +193,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        );

        aggregation = agg ->
            agg.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE);
        testSearchCase(query, dates, aggregation,
            histogram -> assertEquals(0, histogram.getBuckets().size()), false
        );
@ -214,8 +225,8 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    }

    public void testIntervalYearDeprecated() throws IOException {
        testBothCases(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), dataset,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(3, buckets.size());
@ -237,8 +248,8 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    }

    public void testIntervalYear() throws IOException {
        testBothCases(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), dataset,
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(3, buckets.size());
@ -261,7 +272,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    public void testIntervalMonthDeprecated() throws IOException {
        testBothCases(new MatchAllDocsQuery(),
            Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"),
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(3, buckets.size());
@ -285,7 +296,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    public void testIntervalMonth() throws IOException {
        testBothCases(new MatchAllDocsQuery(),
            Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"),
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(3, buckets.size());
@ -316,7 +327,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-03", "2017-02-03",
"2017-02-05" "2017-02-05"
), ),
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(4, buckets.size()); assertEquals(4, buckets.size());
@ -352,7 +363,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-03", "2017-02-03",
"2017-02-05" "2017-02-05"
), ),
aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(4, buckets.size()); assertEquals(4, buckets.size());
@ -384,7 +395,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-03", "2017-02-03",
"2017-02-05" "2017-02-05"
), ),
aggregation -> aggregation.fixedInterval(new DateHistogramInterval("24h")).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.fixedInterval(new DateHistogramInterval("24h")).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(4, buckets.size()); assertEquals(4, buckets.size());
@ -422,7 +433,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T16:48:00.000Z", "2017-02-01T16:48:00.000Z",
"2017-02-01T16:59:00.000Z" "2017-02-01T16:59:00.000Z"
), ),
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.HOUR).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.HOUR).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(6, buckets.size()); assertEquals(6, buckets.size());
@ -469,7 +480,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T16:48:00.000Z", "2017-02-01T16:48:00.000Z",
"2017-02-01T16:59:00.000Z" "2017-02-01T16:59:00.000Z"
), ),
aggregation -> aggregation.calendarInterval(DateHistogramInterval.HOUR).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.calendarInterval(DateHistogramInterval.HOUR).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(6, buckets.size()); assertEquals(6, buckets.size());
@ -512,7 +523,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T16:48:00.000Z", "2017-02-01T16:48:00.000Z",
"2017-02-01T16:59:00.000Z" "2017-02-01T16:59:00.000Z"
), ),
aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60m")).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60m")).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(6, buckets.size()); assertEquals(6, buckets.size());
@ -553,7 +564,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T09:16:04.000Z", "2017-02-01T09:16:04.000Z",
"2017-02-01T09:16:42.000Z" "2017-02-01T09:16:42.000Z"
), ),
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MINUTE).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MINUTE).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -583,7 +594,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T09:16:04.000Z", "2017-02-01T09:16:04.000Z",
"2017-02-01T09:16:42.000Z" "2017-02-01T09:16:42.000Z"
), ),
aggregation -> aggregation.calendarInterval(DateHistogramInterval.MINUTE).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.calendarInterval(DateHistogramInterval.MINUTE).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -609,7 +620,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T09:16:04.000Z", "2017-02-01T09:16:04.000Z",
"2017-02-01T09:16:42.000Z" "2017-02-01T09:16:42.000Z"
), ),
aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60s")).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.fixedInterval(new DateHistogramInterval("60s")).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -639,7 +650,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T00:00:37.210Z", "2017-02-01T00:00:37.210Z",
"2017-02-01T00:00:37.380Z" "2017-02-01T00:00:37.380Z"
), ),
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.SECOND).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.SECOND).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -670,7 +681,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T00:00:37.210Z", "2017-02-01T00:00:37.210Z",
"2017-02-01T00:00:37.380Z" "2017-02-01T00:00:37.380Z"
), ),
aggregation -> aggregation.calendarInterval(DateHistogramInterval.SECOND).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.calendarInterval(DateHistogramInterval.SECOND).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -697,7 +708,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T00:00:37.210Z", "2017-02-01T00:00:37.210Z",
"2017-02-01T00:00:37.380Z" "2017-02-01T00:00:37.380Z"
), ),
aggregation -> aggregation.fixedInterval(new DateHistogramInterval("1000ms")).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.fixedInterval(new DateHistogramInterval("1000ms")).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -727,7 +738,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T00:00:37.210328172Z", "2017-02-01T00:00:37.210328172Z",
"2017-02-01T00:00:37.380889483Z" "2017-02-01T00:00:37.380889483Z"
), ),
aggregation -> aggregation.calendarInterval(DateHistogramInterval.SECOND).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.calendarInterval(DateHistogramInterval.SECOND).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -754,7 +765,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-01T00:00:37.210328172Z", "2017-02-01T00:00:37.210328172Z",
"2017-02-01T00:00:37.380889483Z" "2017-02-01T00:00:37.380889483Z"
), ),
aggregation -> aggregation.fixedInterval(new DateHistogramInterval("1000ms")).field(DATE_FIELD).minDocCount(1L), aggregation -> aggregation.fixedInterval(new DateHistogramInterval("1000ms")).field(AGGREGABLE_DATE).minDocCount(1L),
histogram -> { histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets(); List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(3, buckets.size()); assertEquals(3, buckets.size());
@ -775,7 +786,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    }

    public void testMinDocCountDeprecated() throws IOException {
        Query query = LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2017-02-01T00:00:00.000Z"), asLong("2017-02-01T00:00:30.000Z"));
        List<String> timestamps = Arrays.asList(
"2017-02-01T00:00:05.015Z", "2017-02-01T00:00:05.015Z",
"2017-02-01T00:00:11.299Z", "2017-02-01T00:00:11.299Z",
@ -786,7 +797,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        // 5 sec interval with minDocCount = 0
        testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(4, buckets.size());
@ -811,7 +822,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        // 5 sec interval with minDocCount = 3
        testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(1, buckets.size());
@ -825,7 +836,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    }

    public void testMinDocCount() throws IOException {
        Query query = LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2017-02-01T00:00:00.000Z"), asLong("2017-02-01T00:00:30.000Z"));
        List<String> timestamps = Arrays.asList(
"2017-02-01T00:00:05.015Z", "2017-02-01T00:00:05.015Z",
"2017-02-01T00:00:11.299Z", "2017-02-01T00:00:11.299Z",
@ -836,7 +847,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        // 5 sec interval with minDocCount = 0
        testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(4, buckets.size());
@ -861,7 +872,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        // 5 sec interval with minDocCount = 3
        testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L),
            histogram -> {
                List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(1, buckets.size());
@ -882,25 +893,25 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        );

        expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps,
            aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE),
            histogram -> {}, 2, false));

        expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE),
            histogram -> {}, 2, false));

        expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L),
            histogram -> {}, 100, false));

        expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
            aggregation ->
                aggregation.fixedInterval(DateHistogramInterval.seconds(5))
                    .field(AGGREGABLE_DATE)
                    .subAggregation(
                        AggregationBuilders.dateHistogram("1")
                            .fixedInterval(DateHistogramInterval.seconds(5))
                            .field(AGGREGABLE_DATE)
                    ),
            histogram -> {}, 5, false));
    }
@ -914,25 +925,25 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
        );

        expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE),
            histogram -> {}, 2, false));

        expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE),
            histogram -> {}, 2, false));

        expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L),
            histogram -> {}, 100, false));

        expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
            aggregation ->
                aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5))
                    .field(AGGREGABLE_DATE)
                    .subAggregation(
                        AggregationBuilders.dateHistogram("1")
                            .dateHistogramInterval(DateHistogramInterval.seconds(5))
                            .field(AGGREGABLE_DATE)
                    ),
            histogram -> {}, 5, false));
        assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.");
@ -949,7 +960,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-03", "2017-02-03",
"2017-02-05" "2017-02-05"
), ),
aggregation -> aggregation.fixedInterval(DateHistogramInterval.WEEK).field(DATE_FIELD), aggregation -> aggregation.fixedInterval(DateHistogramInterval.WEEK).field(AGGREGABLE_DATE),
histogram -> {}, false histogram -> {}, false
)); ));
assertThat(e.getMessage(), equalTo("failed to parse setting [date_histogram.fixedInterval] with value [1w] as a time value: " + assertThat(e.getMessage(), equalTo("failed to parse setting [date_histogram.fixedInterval] with value [1w] as a time value: " +
@ -967,7 +978,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-02-03", "2017-02-03",
"2017-02-05" "2017-02-05"
), ),
aggregation -> aggregation.calendarInterval(new DateHistogramInterval("5d")).field(DATE_FIELD), aggregation -> aggregation.calendarInterval(new DateHistogramInterval("5d")).field(AGGREGABLE_DATE),
histogram -> {}, false histogram -> {}, false
)); ));
assertThat(e.getMessage(), equalTo("The supplied interval [5d] could not be parsed as a calendar interval.")); assertThat(e.getMessage(), equalTo("The supplied interval [5d] could not be parsed as a calendar interval."));
@ -986,7 +997,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY)
                .fixedInterval(new DateHistogramInterval("2d"))
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [calendar_interval] configuration option."));
@ -1005,7 +1016,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d"))
                .calendarInterval(DateHistogramInterval.DAY)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [fixed_interval] configuration option."));
@ -1024,7 +1035,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d"))
                .dateHistogramInterval(DateHistogramInterval.DAY)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options."));
@ -1041,7 +1052,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY)
                .dateHistogramInterval(DateHistogramInterval.DAY)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options."));
@ -1058,7 +1069,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("2d"))
                .interval(1000)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options."));
@ -1075,7 +1086,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY)
                .interval(1000)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [interval] with [fixed_interval] or [calendar_interval] configuration options."));
@ -1094,7 +1105,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY)
                .fixedInterval(new DateHistogramInterval("2d"))
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [interval] configuration option."));
@ -1111,7 +1122,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.DAY)
                .calendarInterval(DateHistogramInterval.DAY)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [interval] configuration option."));
@ -1128,7 +1139,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.interval(1000)
                .fixedInterval(new DateHistogramInterval("2d"))
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [fixed_interval] with [interval] configuration option."));
@ -1145,7 +1156,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
            ),
            aggregation -> aggregation.interval(1000)
                .calendarInterval(DateHistogramInterval.DAY)
                .field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Cannot use [calendar_interval] with [interval] configuration option."));
@ -1156,7 +1167,7 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
    public void testIllegalInterval() throws IOException {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(),
            Collections.emptyList(),
            aggregation -> aggregation.dateHistogramInterval(new DateHistogramInterval("foobar")).field(AGGREGABLE_DATE),
            histogram -> {}, false
        ));
        assertThat(e.getMessage(), equalTo("Unable to parse interval [foobar]"));
@ -1210,13 +1221,17 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
                                Consumer<InternalDateHistogram> verify,
                                int maxBucket, boolean useNanosecondResolution) throws IOException {
        // Randomly leave the aggregable date unindexed so we exercise both the
        // prepared (min/max known) and unprepared rounding paths.
        boolean aggregableDateIsSearchable = randomBoolean();

        DateFieldMapper.Builder builder = new DateFieldMapper.Builder("_name");
        if (useNanosecondResolution) {
            builder.withResolution(DateFieldMapper.Resolution.NANOSECONDS);
        }
        DateFieldMapper.DateFieldType fieldType = builder.fieldType();
        fieldType.setHasDocValues(true);
        fieldType.setIndexOptions(aggregableDateIsSearchable ? IndexOptions.DOCS : IndexOptions.NONE);

        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                Document document = new Document();
@ -1226,8 +1241,11 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
                }
                long instant = asLong(date, fieldType);
                document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant));
                if (aggregableDateIsSearchable) {
                    // Without these points the aggregator can't learn the
                    // minimum and maximum dates on the shard.
                    document.add(new LongPoint(AGGREGABLE_DATE, instant));
                }
                document.add(new LongPoint(SEARCHABLE_DATE, instant));
                indexWriter.addDocument(document);
                document.clear();
            }

View File

@ -6,19 +6,29 @@
package org.elasticsearch.xpack.analytics.aggregations.support;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.Rounding.Prepared;
import org.elasticsearch.index.fielddata.DocValueBits;
import org.elasticsearch.index.fielddata.HistogramValues;
import org.elasticsearch.index.fielddata.IndexHistogramFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.search.aggregations.AggregationExecutionException;

import java.io.IOException;
import java.util.function.Function;

public class HistogramValuesSource {
    public abstract static class Histogram extends org.elasticsearch.search.aggregations.support.ValuesSource {

        public abstract HistogramValues getHistogramValues(LeafReaderContext context) throws IOException;

        @Override
        public Function<Rounding, Prepared> roundingPreparer(IndexReader reader) throws IOException {
            throw new AggregationExecutionException("can't round a [histogram]");
        }
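        // Illustrative sketch of the date-typed counterpart to the override
        // above: a source that *can* be rounded returns a preparer instead of
        // throwing. The indexedDateRange helper and the prepareForUnknown
        // fallback are assumptions here, not code from this diff:
        //
        //   @Override
        //   public Function<Rounding, Prepared> roundingPreparer(IndexReader reader) throws IOException {
        //       long[] bounds = indexedDateRange(reader); // e.g. read from the LongPoint index
        //       return bounds == null ? Rounding::prepareForUnknown : rounding -> rounding.prepare(bounds[0], bounds[1]);
        //   }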
        public static class Fielddata extends Histogram {
            protected final IndexHistogramFieldData indexFieldData;

View File

@ -6,15 +6,20 @@
package org.elasticsearch.xpack.spatial.search.aggregations.support;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.Rounding.Prepared;
import org.elasticsearch.index.fielddata.DocValueBits;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.xpack.spatial.index.fielddata.IndexGeoShapeFieldData;
import org.elasticsearch.xpack.spatial.index.fielddata.MultiGeoShapeValues;

import java.io.IOException;
import java.util.function.Function;

public abstract class GeoShapeValuesSource extends ValuesSource {
    public static final GeoShapeValuesSource EMPTY = new GeoShapeValuesSource() {
@ -33,6 +38,11 @@ public abstract class GeoShapeValuesSource extends ValuesSource {
    public abstract MultiGeoShapeValues geoShapeValues(LeafReaderContext context);

    @Override
    public Function<Rounding, Prepared> roundingPreparer(IndexReader reader) throws IOException {
        throw new AggregationExecutionException("can't round a [geo_shape]");
    }
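    // Illustrative sketch (assumed from this change's design) of the caller's
    // side: the date histogram asks its ValuesSource for a preparer once per
    // search rather than once per document, so a source that can't be rounded,
    // like this one, fails fast here:
    //
    //   Function<Rounding, Prepared> preparer = valuesSource.roundingPreparer(searcher.getIndexReader());
    //   Rounding.Prepared prepared = preparer.apply(rounding);
    //   long bucketKey = prepared.round(docValue);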
    @Override
    public DocValueBits docsWithValue(LeafReaderContext context) throws IOException {
        MultiGeoShapeValues values = geoShapeValues(context);