mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-02-25 22:36:20 +00:00
Date Histogram Facet: Improve time zone handling, add factor option, closes #1580.
This commit is contained in:
parent
53b1b6e4bb
commit
8c6b2a3077
@ -0,0 +1,240 @@
|
||||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.joda;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.joda.time.DateTimeConstants;
|
||||
import org.joda.time.DateTimeField;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
/**
|
||||
*/
|
||||
public abstract class TimeZoneRounding {
|
||||
|
||||
public abstract long calc(long utcMillis);
|
||||
|
||||
public static Builder builder(DateTimeField field) {
|
||||
return new Builder(field);
|
||||
}
|
||||
|
||||
public static Builder builder(TimeValue interval) {
|
||||
return new Builder(interval);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
|
||||
private DateTimeField field;
|
||||
private long interval = -1;
|
||||
|
||||
private DateTimeZone preTz = DateTimeZone.UTC;
|
||||
private DateTimeZone postTz = DateTimeZone.UTC;
|
||||
|
||||
private float factor = 1.0f;
|
||||
|
||||
public Builder(DateTimeField field) {
|
||||
this.field = field;
|
||||
this.interval = -1;
|
||||
}
|
||||
|
||||
public Builder(TimeValue interval) {
|
||||
this.field = null;
|
||||
this.interval = interval.millis();
|
||||
}
|
||||
|
||||
public Builder preZone(DateTimeZone preTz) {
|
||||
this.preTz = preTz;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder postZone(DateTimeZone postTz) {
|
||||
this.postTz = postTz;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder factor(float factor) {
|
||||
this.factor = factor;
|
||||
return this;
|
||||
}
|
||||
|
||||
public TimeZoneRounding build() {
|
||||
TimeZoneRounding timeZoneRounding;
|
||||
if (field != null) {
|
||||
if (preTz.equals(DateTimeZone.UTC) && postTz.equals(DateTimeZone.UTC)) {
|
||||
return new UTCTimeZoneRoundingFloor(field);
|
||||
} else if (field.getDurationField().getUnitMillis() < DateTimeConstants.MILLIS_PER_HOUR * 12) {
|
||||
timeZoneRounding = new TimeTimeZoneRoundingFloor(field, preTz, postTz);
|
||||
} else {
|
||||
timeZoneRounding = new DayTimeZoneRoundingFloor(field, preTz, postTz);
|
||||
}
|
||||
} else {
|
||||
if (preTz.equals(DateTimeZone.UTC) && postTz.equals(DateTimeZone.UTC)) {
|
||||
return new UTCIntervalTimeZoneRounding(interval);
|
||||
} else if (interval < DateTimeConstants.MILLIS_PER_HOUR * 12) {
|
||||
timeZoneRounding = new TimeIntervalTimeZoneRounding(interval, preTz, postTz);
|
||||
} else {
|
||||
timeZoneRounding = new DayIntervalTimeZoneRounding(interval, preTz, postTz);
|
||||
}
|
||||
}
|
||||
if (factor != 1.0f) {
|
||||
timeZoneRounding = new FactorTimeZoneRounding(timeZoneRounding, factor);
|
||||
}
|
||||
return timeZoneRounding;
|
||||
}
|
||||
}
|
||||
|
||||
static class TimeTimeZoneRoundingFloor extends TimeZoneRounding {
|
||||
|
||||
private final DateTimeField field;
|
||||
private final DateTimeZone preTz;
|
||||
private final DateTimeZone postTz;
|
||||
|
||||
TimeTimeZoneRoundingFloor(DateTimeField field, DateTimeZone preTz, DateTimeZone postTz) {
|
||||
this.field = field;
|
||||
this.preTz = preTz;
|
||||
this.postTz = postTz;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
long time = utcMillis + preTz.getOffset(utcMillis);
|
||||
time = field.roundFloor(time);
|
||||
// now, time is still in local, move it to UTC
|
||||
time = time - preTz.getOffset(time);
|
||||
// now apply post Tz
|
||||
time = time + postTz.getOffset(time);
|
||||
return time;
|
||||
}
|
||||
}
|
||||
|
||||
static class UTCTimeZoneRoundingFloor extends TimeZoneRounding {
|
||||
|
||||
private final DateTimeField field;
|
||||
|
||||
UTCTimeZoneRoundingFloor(DateTimeField field) {
|
||||
this.field = field;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
return field.roundFloor(utcMillis);
|
||||
}
|
||||
}
|
||||
|
||||
static class DayTimeZoneRoundingFloor extends TimeZoneRounding {
|
||||
private final DateTimeField field;
|
||||
private final DateTimeZone preTz;
|
||||
private final DateTimeZone postTz;
|
||||
|
||||
DayTimeZoneRoundingFloor(DateTimeField field, DateTimeZone preTz, DateTimeZone postTz) {
|
||||
this.field = field;
|
||||
this.preTz = preTz;
|
||||
this.postTz = postTz;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
long time = utcMillis + preTz.getOffset(utcMillis);
|
||||
time = field.roundFloor(time);
|
||||
// after rounding, since its day level (and above), its actually UTC!
|
||||
// now apply post Tz
|
||||
time = time + postTz.getOffset(time);
|
||||
return time;
|
||||
}
|
||||
}
|
||||
|
||||
static class UTCIntervalTimeZoneRounding extends TimeZoneRounding {
|
||||
|
||||
private final long interval;
|
||||
|
||||
UTCIntervalTimeZoneRounding(long interval) {
|
||||
this.interval = interval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
return ((utcMillis / interval) * interval);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static class TimeIntervalTimeZoneRounding extends TimeZoneRounding {
|
||||
|
||||
private final long interval;
|
||||
private final DateTimeZone preTz;
|
||||
private final DateTimeZone postTz;
|
||||
|
||||
TimeIntervalTimeZoneRounding(long interval, DateTimeZone preTz, DateTimeZone postTz) {
|
||||
this.interval = interval;
|
||||
this.preTz = preTz;
|
||||
this.postTz = postTz;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
long time = utcMillis + preTz.getOffset(utcMillis);
|
||||
time = ((time / interval) * interval);
|
||||
// now, time is still in local, move it to UTC
|
||||
time = time - preTz.getOffset(time);
|
||||
// now apply post Tz
|
||||
time = time + postTz.getOffset(time);
|
||||
return time;
|
||||
}
|
||||
}
|
||||
|
||||
static class DayIntervalTimeZoneRounding extends TimeZoneRounding {
|
||||
|
||||
private final long interval;
|
||||
private final DateTimeZone preTz;
|
||||
private final DateTimeZone postTz;
|
||||
|
||||
DayIntervalTimeZoneRounding(long interval, DateTimeZone preTz, DateTimeZone postTz) {
|
||||
this.interval = interval;
|
||||
this.preTz = preTz;
|
||||
this.postTz = postTz;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
long time = utcMillis + preTz.getOffset(utcMillis);
|
||||
time = ((time / interval) * interval);
|
||||
// after rounding, since its day level (and above), its actually UTC!
|
||||
// now apply post Tz
|
||||
time = time + postTz.getOffset(time);
|
||||
return time;
|
||||
}
|
||||
}
|
||||
|
||||
static class FactorTimeZoneRounding extends TimeZoneRounding {
|
||||
|
||||
private final TimeZoneRounding timeZoneRounding;
|
||||
|
||||
private final float factor;
|
||||
|
||||
FactorTimeZoneRounding(TimeZoneRounding timeZoneRounding, float factor) {
|
||||
this.timeZoneRounding = timeZoneRounding;
|
||||
this.factor = factor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long calc(long utcMillis) {
|
||||
return timeZoneRounding.calc((long) (factor * utcMillis));
|
||||
}
|
||||
}
|
||||
}
|
@ -22,6 +22,7 @@ package org.elasticsearch.search.facet.datehistogram;
|
||||
import gnu.trove.map.hash.TLongLongHashMap;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.joda.TimeZoneRounding;
|
||||
import org.elasticsearch.index.cache.field.data.FieldDataCache;
|
||||
import org.elasticsearch.index.field.data.FieldDataType;
|
||||
import org.elasticsearch.index.field.data.longs.LongFieldData;
|
||||
@ -31,22 +32,17 @@ import org.elasticsearch.search.facet.AbstractFacetCollector;
|
||||
import org.elasticsearch.search.facet.Facet;
|
||||
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A date histogram facet collector that uses the same field as the key as well as the
|
||||
* value.
|
||||
*
|
||||
*
|
||||
*/
|
||||
public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
|
||||
private final String indexFieldName;
|
||||
|
||||
private final MutableDateTime dateTime;
|
||||
|
||||
private final DateHistogramFacet.ComparatorType comparatorType;
|
||||
|
||||
private final FieldDataCache fieldDataCache;
|
||||
@ -57,9 +53,8 @@ public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
|
||||
private final DateHistogramProc histoProc;
|
||||
|
||||
public CountDateHistogramFacetCollector(String facetName, String fieldName, MutableDateTime dateTime, long interval, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
|
||||
public CountDateHistogramFacetCollector(String facetName, String fieldName, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
|
||||
super(facetName);
|
||||
this.dateTime = dateTime;
|
||||
this.comparatorType = comparatorType;
|
||||
this.fieldDataCache = context.fieldDataCache();
|
||||
|
||||
@ -77,17 +72,12 @@ public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
|
||||
indexFieldName = mapper.names().indexName();
|
||||
fieldDataType = mapper.fieldDataType();
|
||||
|
||||
if (interval == 1) {
|
||||
histoProc = new DateHistogramProc();
|
||||
} else {
|
||||
histoProc = new IntervalDateHistogramProc(interval);
|
||||
}
|
||||
histoProc = new DateHistogramProc(tzRounding);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doCollect(int doc) throws IOException {
|
||||
fieldData.forEachValueInDoc(doc, dateTime, histoProc);
|
||||
fieldData.forEachValueInDoc(doc, histoProc);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -100,36 +90,23 @@ public class CountDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
return new InternalCountDateHistogramFacet(facetName, comparatorType, histoProc.counts(), true);
|
||||
}
|
||||
|
||||
public static long bucket(long value, long interval) {
|
||||
return ((value / interval) * interval);
|
||||
}
|
||||
public static class DateHistogramProc implements LongFieldData.LongValueInDocProc {
|
||||
|
||||
public static class DateHistogramProc implements LongFieldData.DateValueInDocProc {
|
||||
private final TLongLongHashMap counts = CacheRecycler.popLongLongMap();
|
||||
|
||||
protected final TLongLongHashMap counts = CacheRecycler.popLongLongMap();
|
||||
private final TimeZoneRounding tzRounding;
|
||||
|
||||
public DateHistogramProc(TimeZoneRounding tzRounding) {
|
||||
this.tzRounding = tzRounding;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onValue(int docId, MutableDateTime dateTime) {
|
||||
counts.adjustOrPutValue(dateTime.getMillis(), 1, 1);
|
||||
public void onValue(int docId, long value) {
|
||||
counts.adjustOrPutValue(tzRounding.calc(value), 1, 1);
|
||||
}
|
||||
|
||||
public TLongLongHashMap counts() {
|
||||
return counts;
|
||||
}
|
||||
}
|
||||
|
||||
public static class IntervalDateHistogramProc extends DateHistogramProc {
|
||||
|
||||
private final long interval;
|
||||
|
||||
public IntervalDateHistogramProc(long interval) {
|
||||
this.interval = interval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onValue(int docId, MutableDateTime dateTime) {
|
||||
long bucket = bucket(dateTime.getMillis(), interval);
|
||||
counts.adjustOrPutValue(bucket, 1, 1);
|
||||
}
|
||||
}
|
||||
}
|
@ -20,11 +20,10 @@
|
||||
package org.elasticsearch.search.facet.datehistogram;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import gnu.trove.impl.Constants;
|
||||
import gnu.trove.map.hash.TObjectIntHashMap;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.joda.TimeZoneRounding;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
@ -38,7 +37,7 @@ import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.Chronology;
|
||||
import org.joda.time.DateTimeField;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.MutableDateTime;
|
||||
import org.joda.time.chrono.ISOChronology;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
@ -50,7 +49,6 @@ import java.util.Map;
|
||||
public class DateHistogramFacetProcessor extends AbstractComponent implements FacetProcessor {
|
||||
|
||||
private final ImmutableMap<String, DateFieldParser> dateFieldParsers;
|
||||
private final TObjectIntHashMap<String> rounding = new TObjectIntHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
|
||||
|
||||
@Inject
|
||||
public DateHistogramFacetProcessor(Settings settings) {
|
||||
@ -73,15 +71,6 @@ public class DateHistogramFacetProcessor extends AbstractComponent implements Fa
|
||||
.put("second", new DateFieldParser.SecondOfMinute())
|
||||
.put("1s", new DateFieldParser.SecondOfMinute())
|
||||
.immutableMap();
|
||||
|
||||
rounding.put("floor", MutableDateTime.ROUND_FLOOR);
|
||||
rounding.put("ceiling", MutableDateTime.ROUND_CEILING);
|
||||
rounding.put("half_even", MutableDateTime.ROUND_HALF_EVEN);
|
||||
rounding.put("halfEven", MutableDateTime.ROUND_HALF_EVEN);
|
||||
rounding.put("half_floor", MutableDateTime.ROUND_HALF_FLOOR);
|
||||
rounding.put("halfFloor", MutableDateTime.ROUND_HALF_FLOOR);
|
||||
rounding.put("half_ceiling", MutableDateTime.ROUND_HALF_CEILING);
|
||||
rounding.put("halfCeiling", MutableDateTime.ROUND_HALF_CEILING);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -96,10 +85,11 @@ public class DateHistogramFacetProcessor extends AbstractComponent implements Fa
|
||||
String valueScript = null;
|
||||
String scriptLang = null;
|
||||
Map<String, Object> params = null;
|
||||
boolean intervalSet = false;
|
||||
long interval = 1;
|
||||
String sInterval = null;
|
||||
MutableDateTime dateTime = new MutableDateTime(DateTimeZone.UTC);
|
||||
String interval = null;
|
||||
DateTimeZone preZone = DateTimeZone.UTC;
|
||||
DateTimeZone postZone = DateTimeZone.UTC;
|
||||
float factor = 1.0f;
|
||||
Chronology chronology = ISOChronology.getInstanceUTC();
|
||||
DateHistogramFacet.ComparatorType comparatorType = DateHistogramFacet.ComparatorType.TIME;
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
@ -118,29 +108,15 @@ public class DateHistogramFacetProcessor extends AbstractComponent implements Fa
|
||||
} else if ("value_field".equals(fieldName) || "valueField".equals(fieldName)) {
|
||||
valueField = parser.text();
|
||||
} else if ("interval".equals(fieldName)) {
|
||||
intervalSet = true;
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
interval = parser.longValue();
|
||||
} else {
|
||||
sInterval = parser.text();
|
||||
}
|
||||
interval = parser.text();
|
||||
} else if ("time_zone".equals(fieldName) || "timeZone".equals(fieldName)) {
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
dateTime.setZone(DateTimeZone.forOffsetHours(parser.intValue()));
|
||||
} else {
|
||||
String text = parser.text();
|
||||
int index = text.indexOf(':');
|
||||
if (index != -1) {
|
||||
// format like -02:30
|
||||
dateTime.setZone(DateTimeZone.forOffsetHoursMinutes(
|
||||
Integer.parseInt(text.substring(0, index)),
|
||||
Integer.parseInt(text.substring(index + 1))
|
||||
));
|
||||
} else {
|
||||
// id, listed here: http://joda-time.sourceforge.net/timezones.html
|
||||
dateTime.setZone(DateTimeZone.forID(text));
|
||||
}
|
||||
}
|
||||
preZone = parseZone(parser, token);
|
||||
} else if ("pre_zone".equals(fieldName) || "preZone".equals(fieldName)) {
|
||||
preZone = parseZone(parser, token);
|
||||
} else if ("post_zone".equals(fieldName) || "postZone".equals(fieldName)) {
|
||||
postZone = parseZone(parser, token);
|
||||
} else if ("factor".equals(fieldName)) {
|
||||
factor = parser.floatValue();
|
||||
} else if ("value_script".equals(fieldName) || "valueScript".equals(fieldName)) {
|
||||
valueScript = parser.text();
|
||||
} else if ("order".equals(fieldName) || "comparator".equals(fieldName)) {
|
||||
@ -163,48 +139,46 @@ public class DateHistogramFacetProcessor extends AbstractComponent implements Fa
|
||||
throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] is not of type date");
|
||||
}
|
||||
|
||||
if (!intervalSet) {
|
||||
if (interval == null) {
|
||||
throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
|
||||
}
|
||||
|
||||
// we set the rounding after we set the zone, for it to take effect
|
||||
if (sInterval != null) {
|
||||
int index = sInterval.indexOf(':');
|
||||
if (index != -1) {
|
||||
// set with rounding
|
||||
DateFieldParser fieldParser = dateFieldParsers.get(sInterval.substring(0, index));
|
||||
if (fieldParser == null) {
|
||||
throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "] with custom rounding using built in intervals (year/month/...)");
|
||||
}
|
||||
DateTimeField field = fieldParser.parse(dateTime.getChronology());
|
||||
int rounding = this.rounding.get(sInterval.substring(index + 1));
|
||||
if (rounding == -1) {
|
||||
throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "], rounding type [" + (sInterval.substring(index + 1)) + "] not found");
|
||||
}
|
||||
dateTime.setRounding(field, rounding);
|
||||
} else {
|
||||
DateFieldParser fieldParser = dateFieldParsers.get(sInterval);
|
||||
if (fieldParser != null) {
|
||||
DateTimeField field = fieldParser.parse(dateTime.getChronology());
|
||||
dateTime.setRounding(field, MutableDateTime.ROUND_FLOOR);
|
||||
} else {
|
||||
// time interval
|
||||
try {
|
||||
interval = TimeValue.parseTimeValue(sInterval, null).millis();
|
||||
} catch (Exception e) {
|
||||
throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "], tried both as built in intervals (year/month/...) and as a time format");
|
||||
}
|
||||
}
|
||||
}
|
||||
TimeZoneRounding.Builder tzRoundingBuilder;
|
||||
DateFieldParser fieldParser = dateFieldParsers.get(interval);
|
||||
if (fieldParser != null) {
|
||||
tzRoundingBuilder = TimeZoneRounding.builder(fieldParser.parse(chronology));
|
||||
} else {
|
||||
// not a built-in calendar interval, so parse it as a time value (e.g. "90m")
|
||||
tzRoundingBuilder = TimeZoneRounding.builder(TimeValue.parseTimeValue(interval, null));
|
||||
}
|
||||
|
||||
TimeZoneRounding tzRounding = tzRoundingBuilder.preZone(preZone).postZone(postZone).factor(factor).build();
|
||||
|
||||
if (valueScript != null) {
|
||||
return new ValueScriptDateHistogramFacetCollector(facetName, keyField, scriptLang, valueScript, params, dateTime, interval, comparatorType, context);
|
||||
return new ValueScriptDateHistogramFacetCollector(facetName, keyField, scriptLang, valueScript, params, tzRounding, comparatorType, context);
|
||||
} else if (valueField == null) {
|
||||
return new CountDateHistogramFacetCollector(facetName, keyField, dateTime, interval, comparatorType, context);
|
||||
return new CountDateHistogramFacetCollector(facetName, keyField, tzRounding, comparatorType, context);
|
||||
} else {
|
||||
return new ValueDateHistogramFacetCollector(facetName, keyField, valueField, dateTime, interval, comparatorType, context);
|
||||
return new ValueDateHistogramFacetCollector(facetName, keyField, valueField, tzRounding, comparatorType, context);
|
||||
}
|
||||
}
|
||||
|
||||
private DateTimeZone parseZone(XContentParser parser, XContentParser.Token token) throws IOException {
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
return DateTimeZone.forOffsetHours(parser.intValue());
|
||||
} else {
|
||||
String text = parser.text();
|
||||
int index = text.indexOf(':');
|
||||
if (index != -1) {
|
||||
// format like -02:30
|
||||
return DateTimeZone.forOffsetHoursMinutes(
|
||||
Integer.parseInt(text.substring(0, index)),
|
||||
Integer.parseInt(text.substring(index + 1))
|
||||
);
|
||||
} else {
|
||||
// id, listed here: http://joda-time.sourceforge.net/timezones.html
|
||||
return DateTimeZone.forID(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -21,6 +21,7 @@ package org.elasticsearch.search.facet.datehistogram;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.joda.TimeZoneRounding;
|
||||
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
|
||||
import org.elasticsearch.index.cache.field.data.FieldDataCache;
|
||||
import org.elasticsearch.index.field.data.FieldDataType;
|
||||
@ -32,7 +33,6 @@ import org.elasticsearch.search.facet.AbstractFacetCollector;
|
||||
import org.elasticsearch.search.facet.Facet;
|
||||
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -44,9 +44,6 @@ public class ValueDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
private final String keyIndexFieldName;
|
||||
private final String valueIndexFieldName;
|
||||
|
||||
private MutableDateTime dateTime;
|
||||
private final long interval;
|
||||
|
||||
private final DateHistogramFacet.ComparatorType comparatorType;
|
||||
|
||||
private final FieldDataCache fieldDataCache;
|
||||
@ -58,10 +55,8 @@ public class ValueDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
|
||||
private final DateHistogramProc histoProc;
|
||||
|
||||
public ValueDateHistogramFacetCollector(String facetName, String keyFieldName, String valueFieldName, MutableDateTime dateTime, long interval, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
|
||||
public ValueDateHistogramFacetCollector(String facetName, String keyFieldName, String valueFieldName, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
|
||||
super(facetName);
|
||||
this.dateTime = dateTime;
|
||||
this.interval = interval;
|
||||
this.comparatorType = comparatorType;
|
||||
this.fieldDataCache = context.fieldDataCache();
|
||||
|
||||
@ -85,12 +80,12 @@ public class ValueDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
valueIndexFieldName = mapper.names().indexName();
|
||||
valueFieldDataType = mapper.fieldDataType();
|
||||
|
||||
this.histoProc = new DateHistogramProc(interval);
|
||||
this.histoProc = new DateHistogramProc(tzRounding);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doCollect(int doc) throws IOException {
|
||||
keyFieldData.forEachValueInDoc(doc, dateTime, histoProc);
|
||||
keyFieldData.forEachValueInDoc(doc, histoProc);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -104,26 +99,23 @@ public class ValueDateHistogramFacetCollector extends AbstractFacetCollector {
|
||||
return new InternalFullDateHistogramFacet(facetName, comparatorType, histoProc.entries, true);
|
||||
}
|
||||
|
||||
public static class DateHistogramProc implements LongFieldData.DateValueInDocProc {
|
||||
public static class DateHistogramProc implements LongFieldData.LongValueInDocProc {
|
||||
|
||||
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
|
||||
|
||||
private final long interval;
|
||||
private final TimeZoneRounding tzRounding;
|
||||
|
||||
NumericFieldData valueFieldData;
|
||||
|
||||
final ValueAggregator valueAggregator = new ValueAggregator();
|
||||
|
||||
public DateHistogramProc(long interval) {
|
||||
this.interval = interval;
|
||||
public DateHistogramProc(TimeZoneRounding tzRounding) {
|
||||
this.tzRounding = tzRounding;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onValue(int docId, MutableDateTime dateTime) {
|
||||
long time = dateTime.getMillis();
|
||||
if (interval != 1) {
|
||||
time = CountDateHistogramFacetCollector.bucket(time, interval);
|
||||
}
|
||||
public void onValue(int docId, long value) {
|
||||
long time = tzRounding.calc(value);
|
||||
|
||||
InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
|
||||
if (entry == null) {
|
||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.search.facet.datehistogram;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.elasticsearch.common.CacheRecycler;
|
||||
import org.elasticsearch.common.joda.TimeZoneRounding;
|
||||
import org.elasticsearch.common.trove.ExtTLongObjectHashMap;
|
||||
import org.elasticsearch.index.cache.field.data.FieldDataCache;
|
||||
import org.elasticsearch.index.field.data.FieldDataType;
|
||||
@ -33,7 +34,6 @@ import org.elasticsearch.search.facet.AbstractFacetCollector;
|
||||
import org.elasticsearch.search.facet.Facet;
|
||||
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.MutableDateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
@ -46,8 +46,6 @@ public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollect
|
||||
|
||||
private final String indexFieldName;
|
||||
|
||||
private final MutableDateTime dateTime;
|
||||
|
||||
private final DateHistogramFacet.ComparatorType comparatorType;
|
||||
|
||||
private final FieldDataCache fieldDataCache;
|
||||
@ -60,9 +58,8 @@ public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollect
|
||||
|
||||
private final DateHistogramProc histoProc;
|
||||
|
||||
public ValueScriptDateHistogramFacetCollector(String facetName, String fieldName, String scriptLang, String valueScript, Map<String, Object> params, MutableDateTime dateTime, long interval, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
|
||||
public ValueScriptDateHistogramFacetCollector(String facetName, String fieldName, String scriptLang, String valueScript, Map<String, Object> params, TimeZoneRounding tzRounding, DateHistogramFacet.ComparatorType comparatorType, SearchContext context) {
|
||||
super(facetName);
|
||||
this.dateTime = dateTime;
|
||||
this.comparatorType = comparatorType;
|
||||
this.fieldDataCache = context.fieldDataCache();
|
||||
|
||||
@ -83,16 +80,12 @@ public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollect
|
||||
indexFieldName = mapper.names().indexName();
|
||||
fieldDataType = mapper.fieldDataType();
|
||||
|
||||
if (interval == 1) {
|
||||
histoProc = new DateHistogramProc(this.valueScript);
|
||||
} else {
|
||||
histoProc = new IntervalDateHistogramProc(interval, this.valueScript);
|
||||
}
|
||||
histoProc = new DateHistogramProc(tzRounding, this.valueScript);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doCollect(int doc) throws IOException {
|
||||
fieldData.forEachValueInDoc(doc, dateTime, histoProc);
|
||||
fieldData.forEachValueInDoc(doc, histoProc);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -111,54 +104,23 @@ public class ValueScriptDateHistogramFacetCollector extends AbstractFacetCollect
|
||||
return new InternalFullDateHistogramFacet(facetName, comparatorType, histoProc.entries, true);
|
||||
}
|
||||
|
||||
public static class DateHistogramProc implements LongFieldData.DateValueInDocProc {
|
||||
public static class DateHistogramProc implements LongFieldData.LongValueInDocProc {
|
||||
|
||||
private final TimeZoneRounding tzRounding;
|
||||
|
||||
protected final SearchScript valueScript;
|
||||
|
||||
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries = CacheRecycler.popLongObjectMap();
|
||||
|
||||
public DateHistogramProc(SearchScript valueScript) {
|
||||
public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript) {
|
||||
this.tzRounding = tzRounding;
|
||||
this.valueScript = valueScript;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onValue(int docId, MutableDateTime dateTime) {
|
||||
public void onValue(int docId, long value) {
|
||||
valueScript.setNextDocId(docId);
|
||||
long time = dateTime.getMillis();
|
||||
double scriptValue = valueScript.runAsDouble();
|
||||
|
||||
InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
|
||||
if (entry == null) {
|
||||
entry = new InternalFullDateHistogramFacet.FullEntry(time, 1, scriptValue, scriptValue, 1, scriptValue);
|
||||
entries.put(time, entry);
|
||||
} else {
|
||||
entry.count++;
|
||||
entry.totalCount++;
|
||||
entry.total += scriptValue;
|
||||
if (scriptValue < entry.min) {
|
||||
entry.min = scriptValue;
|
||||
}
|
||||
if (scriptValue > entry.max) {
|
||||
entry.max = scriptValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class IntervalDateHistogramProc extends DateHistogramProc {
|
||||
|
||||
private final long interval;
|
||||
|
||||
public IntervalDateHistogramProc(long interval, SearchScript valueScript) {
|
||||
super(valueScript);
|
||||
this.interval = interval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onValue(int docId, MutableDateTime dateTime) {
|
||||
valueScript.setNextDocId(docId);
|
||||
|
||||
long time = CountDateHistogramFacetCollector.bucket(dateTime.getMillis(), interval);
|
||||
long time = tzRounding.calc(value);
|
||||
double scriptValue = valueScript.runAsDouble();
|
||||
|
||||
InternalFullDateHistogramFacet.FullEntry entry = entries.get(time);
|
||||
|
@ -1424,19 +1424,19 @@ public class SimpleFacetsTests extends AbstractNodesTests {
|
||||
facet = searchResponse.facets().facet("stats2");
|
||||
assertThat(facet.name(), equalTo("stats2"));
|
||||
assertThat(facet.entries().size(), equalTo(2));
|
||||
assertThat(facet.entries().get(0).time(), equalTo(timeInMillis("2009-03-04", DateTimeZone.forOffsetHours(-2))));
|
||||
assertThat(facet.entries().get(0).time(), equalTo(utcTimeInMillis("2009-03-04")));
|
||||
assertThat(facet.entries().get(0).count(), equalTo(1l));
|
||||
assertThat(facet.entries().get(1).time(), equalTo(timeInMillis("2009-03-05", DateTimeZone.forOffsetHours(-2))));
|
||||
assertThat(facet.entries().get(1).time(), equalTo(utcTimeInMillis("2009-03-05")));
|
||||
assertThat(facet.entries().get(1).count(), equalTo(2l));
|
||||
|
||||
// time zone causes the dates to shift by 2
|
||||
facet = searchResponse.facets().facet("stats3");
|
||||
assertThat(facet.name(), equalTo("stats3"));
|
||||
assertThat(facet.entries().size(), equalTo(2));
|
||||
assertThat(facet.entries().get(0).time(), equalTo(timeInMillis("2009-03-04", DateTimeZone.forOffsetHours(-2))));
|
||||
assertThat(facet.entries().get(0).time(), equalTo(utcTimeInMillis("2009-03-04")));
|
||||
assertThat(facet.entries().get(0).count(), equalTo(1l));
|
||||
assertThat(facet.entries().get(0).total(), equalTo(1d));
|
||||
assertThat(facet.entries().get(1).time(), equalTo(timeInMillis("2009-03-05", DateTimeZone.forOffsetHours(-2))));
|
||||
assertThat(facet.entries().get(1).time(), equalTo(utcTimeInMillis("2009-03-05")));
|
||||
assertThat(facet.entries().get(1).count(), equalTo(2l));
|
||||
assertThat(facet.entries().get(1).total(), equalTo(5d));
|
||||
|
||||
@ -1444,10 +1444,10 @@ public class SimpleFacetsTests extends AbstractNodesTests {
|
||||
facet = searchResponse.facets().facet("stats4");
|
||||
assertThat(facet.name(), equalTo("stats4"));
|
||||
assertThat(facet.entries().size(), equalTo(2));
|
||||
assertThat(facet.entries().get(0).time(), equalTo(timeInMillis("2009-03-04", DateTimeZone.forOffsetHours(-2))));
|
||||
assertThat(facet.entries().get(0).time(), equalTo(utcTimeInMillis("2009-03-04")));
|
||||
assertThat(facet.entries().get(0).count(), equalTo(1l));
|
||||
assertThat(facet.entries().get(0).total(), equalTo(2d));
|
||||
assertThat(facet.entries().get(1).time(), equalTo(timeInMillis("2009-03-05", DateTimeZone.forOffsetHours(-2))));
|
||||
assertThat(facet.entries().get(1).time(), equalTo(utcTimeInMillis("2009-03-05")));
|
||||
assertThat(facet.entries().get(1).count(), equalTo(2l));
|
||||
assertThat(facet.entries().get(1).total(), equalTo(10d));
|
||||
|
||||
|
@ -0,0 +1,85 @@
|
||||
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.test.unit.deps.joda;

import org.elasticsearch.common.joda.TimeZoneRounding;
import org.elasticsearch.common.unit.TimeValue;
import org.joda.time.Chronology;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import org.joda.time.format.ISODateTimeFormat;
import org.testng.annotations.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

/**
 */
@Test
|
||||
public class TimeZoneRoundingTests {
|
||||
|
||||
@Test
|
||||
public void testUTCMonthRounding() {
|
||||
TimeZoneRounding tzRounding = TimeZoneRounding.builder(chronology().monthOfYear()).build();
|
||||
assertThat(tzRounding.calc(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-01T00:00:00.000Z")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDayTimeZoneRounding() {
|
||||
TimeZoneRounding tzRounding = TimeZoneRounding.builder(chronology().dayOfMonth()).preZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(0), equalTo(0l - TimeValue.timeValueHours(24).millis()));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(chronology().dayOfMonth()).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(0), equalTo(0l - TimeValue.timeValueHours(26).millis()));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(chronology().dayOfMonth()).preZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-02T00:00:00")));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(chronology().dayOfMonth()).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(utc("2009-02-03T01:01:01")), equalTo(time("2009-02-02T00:00:00", DateTimeZone.forOffsetHours(+2))));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTimeTimeZoneRounding() {
|
||||
TimeZoneRounding tzRounding = TimeZoneRounding.builder(chronology().hourOfDay()).preZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(0), equalTo(0l));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(chronology().hourOfDay()).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(0), equalTo(0l - TimeValue.timeValueHours(2).millis()));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(chronology().hourOfDay()).preZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T01:00:00")));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(chronology().hourOfDay()).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.calc(utc("2009-02-03T01:01:01")), equalTo(time("2009-02-03T01:00:00", DateTimeZone.forOffsetHours(+2))));
|
||||
}
|
||||
|
||||
private static Chronology chronology() {
|
||||
return ISOChronology.getInstanceUTC();
|
||||
}
|
||||
|
||||
private long utc(String time) {
|
||||
return time(time, DateTimeZone.UTC);
|
||||
}
|
||||
|
||||
private long time(String time, DateTimeZone zone) {
|
||||
return ISODateTimeFormat.dateOptionalTimeParser().withZone(zone).parseMillis(time);
|
||||
}
|
||||
}
|
Loading…
x
Reference in New Issue
Block a user