Fixed parsing time zones as numeric value in DateHistogramParser

Closes #5057
uboness 2014-02-09 22:05:25 +01:00
parent 8bf302f6e2
commit b3cb59d024
2 changed files with 95 additions and 19 deletions
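
For context, the failure this addresses: the parser only accepted time_zone, pre_zone and post_zone as strings, so a request that sent the zone as a plain number fell through to the "Unknown key" SearchParseException in the VALUE_NUMBER branch. Below is a minimal, illustrative sketch (not part of the commit) that builds the kind of request body which now parses; the "histo", "date" and "post_zone" values mirror the new test added further down.

// Sketch only: builds the JSON body of a date_histogram whose post_zone is a
// plain integer (hours offset). Class and variable names are illustrative.
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class NumericZoneRequestSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder body = XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("aggs")
                        .startObject("histo")
                            .startObject("date_histogram")
                                .field("field", "date")
                                .field("interval", "1d")
                                .field("post_zone", -1) // numeric zone: rejected before this fix
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject();
        System.out.println(body.string());
    }
}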

DateHistogramParser.java

@@ -110,13 +110,13 @@ public class DateHistogramParser implements Aggregator.Parser {
} else if ("lang".equals(currentFieldName)) {
scriptLang = parser.text();
} else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) {
-preZone = parseZone(parser, token);
+preZone = parseZone(parser.text());
} else if ("pre_zone".equals(currentFieldName) || "preZone".equals(currentFieldName)) {
-preZone = parseZone(parser, token);
+preZone = parseZone(parser.text());
} else if ("pre_zone_adjust_large_interval".equals(currentFieldName) || "preZoneAdjustLargeInterval".equals(currentFieldName)) {
preZoneAdjustLargeInterval = parser.booleanValue();
} else if ("post_zone".equals(currentFieldName) || "postZone".equals(currentFieldName)) {
-postZone = parseZone(parser, token);
+postZone = parseZone(parser.text());
} else if ("pre_offset".equals(currentFieldName) || "preOffset".equals(currentFieldName)) {
preOffset = parseOffset(parser.text());
} else if ("post_offset".equals(currentFieldName) || "postOffset".equals(currentFieldName)) {
@@ -139,6 +139,12 @@ public class DateHistogramParser implements Aggregator.Parser {
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if ("min_doc_count".equals(currentFieldName) || "minDocCount".equals(currentFieldName)) {
minDocCount = parser.longValue();
} else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) {
preZone = DateTimeZone.forOffsetHours(parser.intValue());
} else if ("pre_zone".equals(currentFieldName) || "preZone".equals(currentFieldName)) {
preZone = DateTimeZone.forOffsetHours(parser.intValue());
} else if ("post_zone".equals(currentFieldName) || "postZone".equals(currentFieldName)) {
postZone = DateTimeZone.forOffsetHours(parser.intValue());
} else {
throw new SearchParseException(context, "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "].");
}
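
The new VALUE_NUMBER branch maps the integer straight onto a fixed-offset Joda-Time zone, so numeric values can only express whole-hour offsets; zones such as -02:30 still have to be sent in string form. A minimal sketch of what that call yields, assuming Joda-Time on the classpath (class name is illustrative):

import org.joda.time.DateTimeZone;

// Sketch only: what DateTimeZone.forOffsetHours produces for a value of -2.
public class NumericZoneSketch {
    public static void main(String[] args) {
        DateTimeZone zone = DateTimeZone.forOffsetHours(-2); // same call as the parser
        System.out.println(zone);                // "-02:00" (fixed-offset zone)
        System.out.println(zone.getOffset(0L));  // -7200000 millis
    }
}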
@@ -247,23 +253,18 @@ public class DateHistogramParser implements Aggregator.Parser {
return TimeValue.parseTimeValue(offset.substring(beginIndex), null).millis();
}
-private DateTimeZone parseZone(XContentParser parser, XContentParser.Token token) throws IOException {
-if (token == XContentParser.Token.VALUE_NUMBER) {
-return DateTimeZone.forOffsetHours(parser.intValue());
+private DateTimeZone parseZone(String text) throws IOException {
+int index = text.indexOf(':');
+if (index != -1) {
+int beginIndex = text.charAt(0) == '+' ? 1 : 0;
+// format like -02:30
+return DateTimeZone.forOffsetHoursMinutes(
+Integer.parseInt(text.substring(beginIndex, index)),
+Integer.parseInt(text.substring(index + 1))
+);
} else {
-String text = parser.text();
-int index = text.indexOf(':');
-if (index != -1) {
-int beginIndex = text.charAt(0) == '+' ? 1 : 0;
-// format like -02:30
-return DateTimeZone.forOffsetHoursMinutes(
-Integer.parseInt(text.substring(beginIndex, index)),
-Integer.parseInt(text.substring(index + 1))
-);
-} else {
-// id, listed here: http://joda-time.sourceforge.net/timezones.html
-return DateTimeZone.forID(text);
-}
+// id, listed here: http://joda-time.sourceforge.net/timezones.html
+return DateTimeZone.forID(text);
}
}
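
With the numeric case handled up front, parseZone now only ever receives a string and distinguishes two forms: an offset containing a colon, parsed into DateTimeZone.forOffsetHoursMinutes, and anything else treated as a Joda time zone ID. A standalone sketch of the same logic with a couple of example inputs (class and method placement are illustrative, not part of the commit):

import org.joda.time.DateTimeZone;

public class StringZoneSketch {

    // Mirrors the parseZone(String) logic introduced above.
    static DateTimeZone parseZone(String text) {
        int index = text.indexOf(':');
        if (index != -1) {
            // offset form such as "+02:30"; a leading '+' is stripped before parsing the hours
            int beginIndex = text.charAt(0) == '+' ? 1 : 0;
            return DateTimeZone.forOffsetHoursMinutes(
                    Integer.parseInt(text.substring(beginIndex, index)),
                    Integer.parseInt(text.substring(index + 1)));
        }
        // otherwise a zone ID, e.g. "America/New_York" or "UTC"
        return DateTimeZone.forID(text);
    }

    public static void main(String[] args) {
        System.out.println(parseZone("+02:30"));            // +02:30
        System.out.println(parseZone("America/New_York"));  // America/New_York
    }
}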

DateHistogramTests.java

@@ -22,6 +22,8 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.metrics.max.Max;
@@ -33,6 +35,7 @@ import org.joda.time.DateTimeZone;
import org.junit.Before;
import org.junit.Test;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -118,6 +121,78 @@ public class DateHistogramTests extends ElasticsearchIntegrationTest {
assertThat(bucket.getDocCount(), equalTo(3l));
}
@Test
public void singleValuedField_WithPostTimeZone() throws Exception {
SearchResponse response;
if (randomBoolean()) {
response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).postZone("-01:00"))
.execute().actionGet();
} else {
// checking post_zone setting as an int
response = client().prepareSearch("idx")
.addAggregation(new AbstractAggregationBuilder("histo", "date_histogram") {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.startObject(name)
.startObject(type)
.field("field", "date")
.field("interval", "1d")
.field("post_zone", -1)
.endObject()
.endObject();
}
})
.execute().actionGet();
}
assertSearchResponse(response);
DateHistogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(6));
long key = new DateTime(2012, 1, 2, 0, 0, DateTimeZone.forID("+01:00")).getMillis();
DateHistogram.Bucket bucket = histo.getBucketByKey(key);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 2, 2, 0, 0, DateTimeZone.forID("+01:00")).getMillis();
bucket = histo.getBucketByKey(key);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 2, 15, 0, 0, DateTimeZone.forID("+01:00")).getMillis();
bucket = histo.getBucketByKey(key);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 3, 2, 0, 0, DateTimeZone.forID("+01:00")).getMillis();
bucket = histo.getBucketByKey(key);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 3, 15, 0, 0, DateTimeZone.forID("+01:00")).getMillis();
bucket = histo.getBucketByKey(key);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
key = new DateTime(2012, 3, 23, 0, 0, DateTimeZone.forID("+01:00")).getMillis();
bucket = histo.getBucketByKey(key);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
}
@Test
public void singleValuedField_OrderedByKeyAsc() throws Exception {
SearchResponse response = client().prepareSearch("idx")