Merge pull request #12886 from cbuescher/fix/12278
Make ValueParser.DateMath aware of timezone setting
commit 0fc96ede69
@@ -69,14 +69,14 @@ public class ValueFormat {
         public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT);

         public static DateTime format(String format, DateTimeZone timezone) {
-            return new DateTime(format, new ValueFormatter.DateTime(format, timezone), new ValueParser.DateMath(format));
+            return new DateTime(format, new ValueFormatter.DateTime(format, timezone), new ValueParser.DateMath(format, timezone));
         }

         public static DateTime mapper(DateFieldMapper.DateFieldType fieldType, DateTimeZone timezone) {
-            return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType, timezone), ValueParser.DateMath.mapper(fieldType));
+            return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType, timezone), ValueParser.DateMath.mapper(fieldType, timezone));
         }

-        public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
+        private DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
             super(pattern, formatter, parser);
         }

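The change to both factory methods is the heart of the fix for #12278: the caller's `DateTimeZone` now reaches the `ValueParser.DateMath` as well as the `ValueFormatter`, so date-math rounding and key formatting agree on one zone (and the constructor becomes private, leaving the factories as the only entry points). The sketch below is plain Joda-Time, not the Elasticsearch `DateMathParser`; it only illustrates why rounding `now/d` without the aggregation's zone, i.e. always in UTC, can be off by up to the zone offset.

```java
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

// Illustration only: "start of the current day" is a different instant in every
// zone, so a parser that always rounds in UTC disagrees with a formatter (and
// with extended_bounds) that use the aggregation's time_zone.
public class StartOfDaySketch {
    public static void main(String[] args) {
        DateTime nowUtc = DateTime.now(DateTimeZone.UTC);

        long startOfDayUtc = nowUtc.withTimeAtStartOfDay().getMillis();
        long startOfDayPlus12 = nowUtc.withZone(DateTimeZone.forOffsetHours(12))
                .withTimeAtStartOfDay().getMillis();

        // The two "now/d" instants can be up to twelve hours apart.
        System.out.println((startOfDayUtc - startOfDayPlus12) / (60 * 60 * 1000L) + "h apart");
    }
}
```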
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.search.aggregations.support.format;

+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.joda.DateMathParser;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.common.joda.Joda;
@@ -25,6 +26,7 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.index.mapper.ip.IpFieldMapper;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.internal.SearchContext;
+import org.joda.time.DateTimeZone;

 import java.text.DecimalFormat;
 import java.text.DecimalFormatSymbols;
@@ -80,16 +82,21 @@ public interface ValueParser {
      */
     static class DateMath implements ValueParser {

-        public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER));
+        public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER), DateTimeZone.UTC);

         private DateMathParser parser;

-        public DateMath(String format) {
-            this(new DateMathParser(Joda.forPattern(format)));
+        private DateTimeZone timezone = DateTimeZone.UTC;
+
+        public DateMath(String format, DateTimeZone timezone) {
+            this(new DateMathParser(Joda.forPattern(format)), timezone);
         }

-        public DateMath(DateMathParser parser) {
+        public DateMath(DateMathParser parser, @Nullable DateTimeZone timeZone) {
             this.parser = parser;
+            if (timeZone != null) {
+                this.timezone = timeZone;
+            }
         }

         @Override
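The new `@Nullable` constructor keeps UTC as the default whenever no zone is supplied, so call sites that have no `time_zone` setting keep their previous behaviour. A stripped-down, self-contained sketch of that defaulting pattern follows; the names are simplified and the real class additionally wraps an Elasticsearch `DateMathParser`.

```java
import org.joda.time.DateTimeZone;

// Simplified stand-in for ValueParser.DateMath: the zone defaults to UTC and is
// only overridden when the caller actually provides one.
public class ZoneDefaultingParser {
    private DateTimeZone timezone = DateTimeZone.UTC;

    public ZoneDefaultingParser(DateTimeZone timeZone) { // may be null
        if (timeZone != null) {
            this.timezone = timeZone;
        }
    }

    public DateTimeZone timezone() {
        return timezone;
    }

    public static void main(String[] args) {
        System.out.println(new ZoneDefaultingParser(null).timezone());                             // UTC
        System.out.println(new ZoneDefaultingParser(DateTimeZone.forOffsetHours(-7)).timezone());  // -07:00
    }
}
```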
@@ -100,7 +107,7 @@ public interface ValueParser {
                     return searchContext.nowInMillis();
                 }
             };
-            return parser.parse(value, now);
+            return parser.parse(value, now, false, timezone);
         }

         @Override
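`parseLong` already supplied "now" lazily through a `Callable`, so every expression in one request resolves against the same `searchContext.nowInMillis()`; the only change here is that the stored zone is now forwarded to `DateMathParser.parse`. Below is a hedged sketch of the call shape outside the aggregation code. It assumes the `DateMathParser` and `Joda` helpers from this codebase are on the classpath, and it reads the boolean argument as the round-up flag (false rounds a lower bound down), matching how the new test calls the same method.

```java
import java.util.concurrent.Callable;

import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.Joda;
import org.joda.time.DateTimeZone;

public class DateMathCallSketch {
    public static void main(String[] args) {
        DateMathParser parser = new DateMathParser(Joda.forPattern("dateOptionalTime"));

        // Lazy "now": in the aggregation code this returns searchContext.nowInMillis(),
        // so all date math inside one search request agrees on a single instant.
        Callable<Long> now = new Callable<Long>() {
            @Override
            public Long call() throws Exception {
                return System.currentTimeMillis();
            }
        };

        // Same shape as the patched parseLong: value, now, roundUp, time zone.
        long startOfDayInZone = parser.parse("now/d", now, false, DateTimeZone.forOffsetHours(12));
        System.out.println(startOfDayInZone);
    }
}
```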
@@ -108,8 +115,8 @@
             return parseLong(value, searchContext);
         }

-        public static DateMath mapper(DateFieldMapper.DateFieldType fieldType) {
-            return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()));
+        public static DateMath mapper(DateFieldMapper.DateFieldType fieldType, @Nullable DateTimeZone timezone) {
+            return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()), timezone);
         }
     }

@@ -21,9 +21,11 @@ package org.elasticsearch.search.aggregations.bucket;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.joda.DateMathParser;
+import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
@@ -42,6 +44,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;

@@ -558,7 +561,7 @@ public class DateHistogramIT extends ESIntegTestCase {
         assertThat(bucket.getDocCount(), equalTo(3l));
     }
-
+


     /*
     [ Jan 2, Feb 3]
@@ -904,7 +907,7 @@ public class DateHistogramIT extends ESIntegTestCase {
         assertThat(bucket.getDocCount(), equalTo(3l));
     }
-
+


     /*
     [ Jan 2, Feb 3]
@@ -1195,6 +1198,70 @@ public class DateHistogramIT extends ESIntegTestCase {
         }
     }

+    /**
+     * Test date histogram aggregation with hour interval, timezone shift and
+     * extended bounds (see https://github.com/elastic/elasticsearch/issues/12278)
+     */
+    @Test
+    public void singleValueField_WithExtendedBoundsTimezone() throws Exception {
+
+        String index = "test12278";
+        prepareCreate(index)
+                .setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
+                .execute().actionGet();
+
+        DateMathParser parser = new DateMathParser(Joda.getStrictStandardDateFormatter());
+
+        final Callable<Long> callable = new Callable<Long>() {
+            @Override
+            public Long call() throws Exception {
+                return System.currentTimeMillis();
+            }
+        };
+
+        // we pick a random timezone offset between -12 and +12 hours and insert two documents,
+        // one at 00:00 in that time zone and one at 12:00
+        List<IndexRequestBuilder> builders = new ArrayList<>();
+        int timeZoneHourOffset = randomIntBetween(-12, 12);
+        DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
+        DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", callable, false, timezone), DateTimeZone.UTC);
+        DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", callable, false, timezone), DateTimeZone.UTC);
+        builders.add(indexDoc(index, timeZoneStartToday, 1));
+        builders.add(indexDoc(index, timeZoneNoonToday, 2));
+        indexRandom(true, builders);
+        ensureSearchable(index);
+
+        SearchResponse response = null;
+        // retrieve those docs with the same time zone and extended bounds
+        response = client()
+                .prepareSearch(index)
+                .setQuery(QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID()))
+                .addAggregation(
+                        dateHistogram("histo").field("date").interval(DateHistogramInterval.hours(1)).timeZone(timezone.getID()).minDocCount(0)
+                                .extendedBounds("now/d", "now/d+23h")
+                ).execute().actionGet();
+        assertSearchResponse(response);
+
+        assertThat("Expected both indexed documents to fall into today's date range", response.getHits().totalHits(), equalTo(2l));
+
+        Histogram histo = response.getAggregations().get("histo");
+        assertThat(histo, notNullValue());
+        assertThat(histo.getName(), equalTo("histo"));
+        List<? extends Bucket> buckets = histo.getBuckets();
+        assertThat(buckets.size(), equalTo(24));
+
+        for (int i = 0; i < buckets.size(); i++) {
+            Histogram.Bucket bucket = buckets.get(i);
+            assertThat(bucket, notNullValue());
+            assertThat("Bucket " + i + " had wrong key", (DateTime) bucket.getKey(), equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
+            if (i == 0 || i == 12) {
+                assertThat(bucket.getDocCount(), equalTo(1l));
+            } else {
+                assertThat(bucket.getDocCount(), equalTo(0l));
+            }
+        }
+    }
+
     @Test
     public void singleValue_WithMultipleDateFormatsFromMapping() throws Exception {

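The assertions in the new test encode the expected bucket layout: with an hourly interval and extended bounds of `now/d` to `now/d+23h`, the histogram must contain exactly 24 buckets whose keys start at midnight in the chosen zone (expressed as a UTC instant) and advance by one hour, with the two indexed documents landing in buckets 0 and 12. A small Joda-Time sketch of that key arithmetic, independent of the test harness:

```java
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

// Recomputes the keys the test expects: start of "today" in a +12 zone,
// then one bucket per hour for 24 hours.
public class ExpectedBucketKeys {
    public static void main(String[] args) {
        DateTimeZone zone = DateTimeZone.forOffsetHours(12);
        long startOfDay = DateTime.now(zone).withTimeAtStartOfDay().getMillis();

        for (int i = 0; i < 24; i++) {
            DateTime key = new DateTime(startOfDay + i * 60 * 60 * 1000L, DateTimeZone.UTC);
            // Documents were indexed at 00:00 and 12:00 local time, so only
            // buckets 0 and 12 should hold one document; all others stay empty.
            long expectedDocCount = (i == 0 || i == 12) ? 1 : 0;
            System.out.println(key + " -> " + expectedDocCount);
        }
    }
}
```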
@@ -1233,7 +1300,7 @@ public class DateHistogramIT extends ESIntegTestCase {
                 .execute().actionGet();

         assertSearchResponse(response);
-
+
         DateTimeZone tz = DateTimeZone.forID("+01:00");

         Histogram histo = response.getAggregations().get("histo");