Switch mapping/aggregations over to java time (#36363)

This commit moves the aggregation and mapping code from joda time to
java time. This includes field mappers, root object mappers, aggregations with date
histograms, query builders and a lot of changes within tests.

The cut-over to java time is a requirement so that we can support nanoseconds
properly in a future field mapper.

Relates #27330
This commit is contained in:
Alexander Reelsen 2019-01-23 10:40:05 +01:00 committed by GitHub
parent 7b3dd3022d
commit daa2ec8a60
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
157 changed files with 1824 additions and 2250 deletions

View File

@ -55,4 +55,3 @@ public class DateFormatterBenchmark {
return jodaFormatter.parse("1234567890");
}
}

View File

@ -31,9 +31,9 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
@ -107,7 +107,7 @@ public class DenseVectorFieldMapper extends FieldMapper implements ArrayValueMap
}
@Override
public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
throw new UnsupportedOperationException(
"Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations");
}

View File

@ -59,10 +59,10 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.math.BigDecimal;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@ -301,7 +301,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
+ "] does not support custom time zones");

View File

@ -31,9 +31,9 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
@ -107,7 +107,7 @@ public class SparseVectorFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
throw new UnsupportedOperationException(
"Field [" + name() + "] of type [" + typeName() + "] doesn't support docvalue_fields or aggregations");
}

View File

@ -46,9 +46,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@ -208,7 +208,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
};
@Override
public DocValueFormat docValueFormat(final String format, final DateTimeZone timeZone) {
public DocValueFormat docValueFormat(final String format, final ZoneId timeZone) {
return COLLATE_FORMAT;
}
}

View File

@ -9,7 +9,7 @@
index: timetest
body:
mappings:
test: { "properties": { "my_time": {"type": "date"}}}
test: { "properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}}
- do:
ingest.put_pipeline:

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -368,7 +367,7 @@ public final class IndexGraveyard implements MetaData.Custom {
TOMBSTONE_PARSER.declareString((b, s) -> {}, new ParseField(DELETE_DATE_KEY));
}
static final DateFormatter FORMATTER = DateFormatters.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
static final DateFormatter FORMATTER = DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneOffset.UTC);
static ContextParser<Void, Tombstone> getParser() {
return (parser, context) -> TOMBSTONE_PARSER.apply(parser, null).build();

View File

@ -28,8 +28,8 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
@ -819,7 +819,7 @@ public class IndexNameExpressionResolver {
static final class DateMathExpressionResolver implements ExpressionResolver {
private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatters.forPattern("uuuu.MM.dd");
private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd");
private static final String EXPRESSION_LEFT_BOUND = "<";
private static final String EXPRESSION_RIGHT_BOUND = ">";
private static final char LEFT_BOUND = '{';
@ -912,18 +912,19 @@ public class IndexNameExpressionResolver {
int formatPatternTimeZoneSeparatorIndex = patternAndTZid.indexOf(TIME_ZONE_BOUND);
if (formatPatternTimeZoneSeparatorIndex != -1) {
dateFormatterPattern = patternAndTZid.substring(0, formatPatternTimeZoneSeparatorIndex);
timeZone = ZoneId.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1));
timeZone = DateUtils.of(patternAndTZid.substring(formatPatternTimeZoneSeparatorIndex + 1));
} else {
dateFormatterPattern = patternAndTZid;
timeZone = ZoneOffset.UTC;
}
dateFormatter = DateFormatters.forPattern(dateFormatterPattern);
dateFormatter = DateFormatter.forPattern(dateFormatterPattern);
}
DateFormatter formatter = dateFormatter.withZone(timeZone);
DateMathParser dateMathParser = formatter.toDateMathParser();
long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
Instant instant = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
String time = formatter.format(Instant.ofEpochMilli(millis));
String time = formatter.format(instant);
beforePlaceHolderSb.append(time);
inPlaceHolderSb = new StringBuilder();
inPlaceHolder = false;

View File

@ -32,7 +32,6 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -48,7 +47,7 @@ import java.util.Objects;
*/
public final class UnassignedInfo implements ToXContentFragment, Writeable {
public static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);
public static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("dateOptionalTime").withZone(ZoneOffset.UTC);
public static final Setting<TimeValue> INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
Setting.positiveTimeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic,

View File

@ -19,9 +19,11 @@
package org.elasticsearch.common;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.unit.TimeValue;
import java.io.IOException;
@ -188,7 +190,7 @@ public abstract class Rounding implements Writeable {
TimeUnitRounding(StreamInput in) throws IOException {
unit = DateTimeUnit.resolve(in.readByte());
timeZone = ZoneId.of(in.readString());
timeZone = DateUtils.of(in.readString());
unitRoundsToMidnight = unit.getField().getBaseUnit().getDuration().toMillis() > 60L * 60L * 1000L;
}
@ -367,8 +369,11 @@ public abstract class Rounding implements Writeable {
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeByte(unit.getId());
String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
out.writeString(tz);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
out.writeString(timeZone.getId());
} else {
out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
}
}
@Override
@ -417,7 +422,7 @@ public abstract class Rounding implements Writeable {
TimeIntervalRounding(StreamInput in) throws IOException {
interval = in.readVLong();
timeZone = ZoneId.of(in.readString());
timeZone = DateUtils.of(in.readString());
}
@Override
@ -490,8 +495,11 @@ public abstract class Rounding implements Writeable {
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
out.writeVLong(interval);
String tz = ZoneOffset.UTC.equals(timeZone) ? "UTC" : timeZone.getId(); // stay joda compatible
out.writeString(tz);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
out.writeString(timeZone.getId());
} else {
out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
}
}
@Override

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import java.time.Instant;
import java.time.ZoneOffset;
@ -85,7 +84,7 @@ public class Table {
return this;
}
private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
public Table startRow() {
if (headers.isEmpty()) {

View File

@ -653,6 +653,23 @@ public abstract class StreamInput extends InputStream {
return null;
}
/**
 * Read a {@linkplain ZoneId} from the stream.
 *
 * @return the zone id that was written with {@code writeZoneId}
 * @throws IOException if reading from the underlying stream fails
 */
public ZoneId readZoneId() throws IOException {
return ZoneId.of(readString());
}
/**
 * Read an optional {@linkplain ZoneId}: a boolean presence flag followed,
 * when {@code true}, by the zone id string.
 *
 * @return the zone id, or {@code null} if none was written
 * @throws IOException if reading from the underlying stream fails
 */
public ZoneId readOptionalZoneId() throws IOException {
if (readBoolean() == false) {
return null;
}
return ZoneId.of(readString());
}
public int[] readIntArray() throws IOException {
int length = readArraySize();
int[] values = new int[length];

View File

@ -55,6 +55,7 @@ import java.nio.file.FileSystemException;
import java.nio.file.FileSystemLoopException;
import java.nio.file.NoSuchFileException;
import java.nio.file.NotDirectoryException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
@ -677,7 +678,6 @@ public abstract class StreamOutput extends OutputStream {
writers.put(ZonedDateTime.class, (o, v) -> {
o.writeByte((byte) 23);
final ZonedDateTime zonedDateTime = (ZonedDateTime) v;
zonedDateTime.getZone().getId();
o.writeString(zonedDateTime.getZone().getId());
o.writeLong(zonedDateTime.toInstant().toEpochMilli());
});
@ -988,6 +988,13 @@ public abstract class StreamOutput extends OutputStream {
writeString(timeZone.getID());
}
/**
 * Write a {@linkplain ZoneId} to the stream.
 *
 * @param timeZone the zone id to write; must not be {@code null}
 * @throws IOException if writing to the underlying stream fails
 */
public void writeZoneId(ZoneId timeZone) throws IOException {
writeString(timeZone.getId());
}
/**
* Write an optional {@linkplain DateTimeZone} to the stream.
*/
@ -1000,6 +1007,18 @@ public abstract class StreamOutput extends OutputStream {
}
}
/**
 * Write an optional {@linkplain ZoneId} to the stream: a boolean presence
 * flag followed, when present, by the zone id itself.
 *
 * @param timeZone the zone id to write, or {@code null} for absent
 * @throws IOException if writing to the underlying stream fails
 */
public void writeOptionalZoneId(@Nullable ZoneId timeZone) throws IOException {
if (timeZone != null) {
writeBoolean(true);
writeZoneId(timeZone);
} else {
writeBoolean(false);
}
}
/**
* Writes a list of {@link Streamable} objects
*/

View File

@ -31,12 +31,12 @@ import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
import java.util.Objects;
public class JodaDateFormatter implements DateFormatter {
final String pattern;
final DateTimeFormatter parser;
final DateTimeFormatter printer;
public JodaDateFormatter(String pattern, DateTimeFormatter parser, DateTimeFormatter printer) {
@ -108,4 +108,21 @@ public class JodaDateFormatter implements DateFormatter {
public DateMathParser toDateMathParser() {
return new JodaDateMathParser(this);
}
/**
 * Hash code derived from locale, zone and pattern — the same three
 * properties compared by {@code equals}, keeping the two consistent.
 */
@Override
public int hashCode() {
return Objects.hash(locale(), zone(), pattern());
}
/**
 * Two formatters are equal when they share the same pattern, locale and
 * zone. Consistent with {@code hashCode}, which hashes the same fields.
 *
 * @param obj the object to compare against, may be {@code null}
 * @return {@code true} if {@code obj} is a JodaDateFormatter with equal
 *         pattern, locale and zone
 */
@Override
public boolean equals(Object obj) {
// null guard: Object.equals contract requires equals(null) == false,
// and obj.getClass() would otherwise throw a NullPointerException
if (obj == null || obj.getClass().equals(this.getClass()) == false) {
return false;
}
JodaDateFormatter other = (JodaDateFormatter) obj;
return Objects.equals(pattern(), other.pattern()) &&
Objects.equals(locale(), other.locale()) &&
Objects.equals(zone(), other.zone());
}
}

View File

@ -26,6 +26,7 @@ import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Objects;
import java.util.function.LongSupplier;
@ -50,7 +51,7 @@ public class JodaDateMathParser implements DateMathParser {
// if it has been used. For instance, the request cache does not cache requests that make
// use of `now`.
@Override
public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
final DateTimeZone timeZone = tz == null ? null : DateUtils.zoneIdToDateTimeZone(tz);
long time;
String mathString;
@ -64,13 +65,13 @@ public class JodaDateMathParser implements DateMathParser {
} else {
int index = text.indexOf("||");
if (index == -1) {
return parseDateTime(text, timeZone, roundUp);
return Instant.ofEpochMilli(parseDateTime(text, timeZone, roundUp));
}
time = parseDateTime(text.substring(0, index), timeZone, false);
mathString = text.substring(index + 2);
}
return parseMath(mathString, time, roundUp, timeZone);
return Instant.ofEpochMilli(parseMath(mathString, time, roundUp, timeZone));
}
private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws ElasticsearchParseException {

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common.time;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.Joda;
import org.joda.time.DateTime;
import java.time.Instant;
@ -87,7 +86,8 @@ public interface DateFormatter {
* Return the given millis-since-epoch formatted with this format.
*/
default String formatMillis(long millis) {
return format(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC));
ZoneId zone = zone() != null ? zone() : ZoneOffset.UTC;
return format(Instant.ofEpochMilli(millis).atZone(zone));
}
/**
@ -121,7 +121,9 @@ public interface DateFormatter {
ZoneId zone();
/**
* Return a {@link DateMathParser} built from this formatter.
* Create a DateMathParser from the existing formatter
*
* @return The DateMathParser object
*/
DateMathParser toDateMathParser();
@ -129,12 +131,11 @@ public interface DateFormatter {
if (Strings.hasLength(input) == false) {
throw new IllegalArgumentException("No date pattern provided");
}
if (input.startsWith("8") == false) {
return Joda.forPattern(input);
}
// dates starting with 8 will not be using joda but java time formatters
input = input.substring(1);
// support the 6.x BWC compatible way of parsing java 8 dates
if (input.startsWith("8")) {
input = input.substring(1);
}
List<DateFormatter> formatters = new ArrayList<>();
for (String pattern : Strings.delimitedListToStringArray(input, "||")) {

View File

@ -76,28 +76,53 @@ public class DateFormatters {
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral('T')
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.optionalStart()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 3, true)
.optionalEnd()
.optionalEnd()
.optionalStart()
.appendZoneOrOffsetId()
.optionalEnd()
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT);
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.optionalStart()
.appendLiteral('T')
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
.optionalStart()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 3, true)
.optionalEnd()
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.optionalEnd()
.optionalEnd()
.optionalStart()
.appendZoneOrOffsetId()
.optionalEnd()
.optionalStart()
.append(TIME_ZONE_FORMATTER_NO_COLON)
.optionalEnd()
.optionalEnd()
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT);
/**
@ -123,11 +148,33 @@ public class DateFormatters {
.optionalEnd()
.toFormatter(Locale.ROOT);
private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS = new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral('T')
.optionalStart()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral(':')
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendFraction(NANO_OF_SECOND, 3, 9, true)
.optionalEnd()
.optionalEnd()
.optionalStart()
.appendZoneOrOffsetId()
.optionalEnd()
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT);
/**
* Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution.
*/
private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter("strict_date_optional_time_nanos",
STRICT_DATE_OPTIONAL_TIME_PRINTER, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS);
STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS, STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS);
/////////////////////////////////////////
//
@ -329,31 +376,32 @@ public class DateFormatters {
* Returns a basic formatter that combines a basic weekyear date and time
* without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX).
*/
private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new JavaDateFormatter("strict_basic_week_date_no_millis",
new DateTimeFormatterBuilder()
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT),
new DateTimeFormatterBuilder()
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT),
new DateTimeFormatterBuilder()
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
private static final DateFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS =
new JavaDateFormatter("strict_basic_week_date_time_no_millis",
new DateTimeFormatterBuilder()
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT),
new DateTimeFormatterBuilder()
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.appendZoneOrOffsetId()
.toFormatter(Locale.ROOT),
new DateTimeFormatterBuilder()
.append(STRICT_BASIC_WEEK_DATE_PRINTER)
.appendLiteral("T")
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
.append(TIME_ZONE_FORMATTER_NO_COLON)
.toFormatter(Locale.ROOT)
);
/*
@ -389,7 +437,7 @@ public class DateFormatters {
* An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'.
*/
private static final DateFormatter STRICT_DATE = new JavaDateFormatter("strict_date",
DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT));
DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT).withLocale(Locale.ROOT));
/*
* A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'.
@ -514,7 +562,9 @@ public class DateFormatters {
new JavaDateFormatter("strict_hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS;
private static final DateFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION =
new JavaDateFormatter("strict_hour_minute_second_fraction",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
/*
* Returns a formatter that combines a full date, two digit hour of day,
@ -537,7 +587,21 @@ public class DateFormatters {
.toFormatter(Locale.ROOT)
);
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION;
private static final DateFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter(
"strict_date_hour_minute_second_millis",
new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral("T")
.append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
.toFormatter(Locale.ROOT),
new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral("T")
.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
// this one here is lenient as well to retain joda time based bwc compatibility
.appendFraction(NANO_OF_SECOND, 1, 3, true)
.toFormatter(Locale.ROOT)
);
/*
* Returns a formatter for a two digit hour of day. (HH)
@ -782,14 +846,12 @@ public class DateFormatters {
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder()
.appendValue(ChronoField.YEAR, 1, 5, SignStyle.NORMAL)
.optionalStart()
.appendLiteral('-')
.appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE)
.optionalStart()
.appendLiteral('-')
.appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE)
.optionalEnd()
.optionalEnd()
.toFormatter(Locale.ROOT);
private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder()
@ -928,7 +990,17 @@ public class DateFormatters {
.append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
.toFormatter(Locale.ROOT));
private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS;
private static final DateFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("date_hour_minute_second_fraction",
new DateTimeFormatterBuilder()
.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
.appendLiteral("T")
.append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER)
.toFormatter(Locale.ROOT),
new DateTimeFormatterBuilder()
.append(DATE_FORMATTER)
.appendLiteral("T")
.append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER)
.toFormatter(Locale.ROOT));
/*
* Returns a formatter that combines a full date, two digit hour of day,
@ -1033,6 +1105,9 @@ public class DateFormatters {
private static final DateFormatter HOUR_MINUTE_SECOND_MILLIS = new JavaDateFormatter("hour_minute_second_millis",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
private static final DateFormatter HOUR_MINUTE_SECOND_FRACTION = new JavaDateFormatter("hour_minute_second_fraction",
STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, HOUR_MINUTE_SECOND_MILLIS_FORMATTER);
/*
* Returns a formatter for a two digit hour of day and two digit minute of
* hour. (HH:mm)
@ -1272,7 +1347,7 @@ public class DateFormatters {
//
/////////////////////////////////////////
public static DateFormatter forPattern(String input) {
static DateFormatter forPattern(String input) {
if (Strings.hasLength(input)) {
input = input.trim();
}
@ -1331,7 +1406,7 @@ public class DateFormatters {
} else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) {
return HOUR_MINUTE_SECOND;
} else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) {
return HOUR_MINUTE_SECOND_MILLIS;
return HOUR_MINUTE_SECOND_FRACTION;
} else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) {
return HOUR_MINUTE_SECOND_MILLIS;
} else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.common.time;
import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.ZoneId;
import java.util.function.LongSupplier;
@ -32,7 +33,7 @@ public interface DateMathParser {
/**
* Parse a date math expression without timezone info and rounding down.
*/
default long parse(String text, LongSupplier now) {
default Instant parse(String text, LongSupplier now) {
return parse(text, now, false, (ZoneId) null);
}
@ -42,7 +43,7 @@ public interface DateMathParser {
// exists for backcompat, do not use!
@Deprecated
default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) {
default Instant parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) {
return parse(text, now, roundUp, tz == null ? null : ZoneId.of(tz.getID()));
}
@ -68,7 +69,7 @@ public interface DateMathParser {
* @param now a supplier to retrieve the current date in milliseconds, if needed for additions
* @param roundUp should the result be rounded up
* @param tz an optional timezone that should be applied before returning the milliseconds since the epoch
* @return the parsed date in milliseconds since the epoch
* @return the parsed date as an {@linkplain Instant}
*/
long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz);
Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId tz);
}

View File

@ -65,12 +65,16 @@ public class DateUtils {
return ZoneOffset.UTC;
}
String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(timeZone.getID());
return of(timeZone.getID());
}
public static ZoneId of(String zoneId) {
String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(zoneId);
if (deprecatedId != null) {
deprecationLogger.deprecatedAndMaybeLog("timezone",
"Use of short timezone id " + timeZone.getID() + " is deprecated. Use " + deprecatedId + " instead");
"Use of short timezone id " + zoneId + " is deprecated. Use " + deprecatedId + " instead");
return ZoneId.of(deprecatedId);
}
return ZoneId.of(timeZone.getID());
return ZoneId.of(zoneId).normalized();
}
}

View File

@ -19,6 +19,8 @@
package org.elasticsearch.common.time;
import org.elasticsearch.bootstrap.JavaVersion;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.ResolverStyle;
@ -99,6 +101,10 @@ class EpochTime {
}
fieldValues.put(ChronoField.INSTANT_SECONDS, seconds);
fieldValues.put(ChronoField.NANO_OF_SECOND, nanos);
// if there is already a milli of second, we need to overwrite it
if (fieldValues.containsKey(ChronoField.MILLI_OF_SECOND)) {
fieldValues.put(ChronoField.MILLI_OF_SECOND, nanos / 1_000_000);
}
return null;
}
};
@ -106,7 +112,8 @@ class EpochTime {
private static final EpochField NANOS_OF_MILLI = new EpochField(ChronoUnit.NANOS, ChronoUnit.MILLIS, ValueRange.of(0, 999_999)) {
@Override
public boolean isSupportedBy(TemporalAccessor temporal) {
return temporal.isSupported(ChronoField.NANO_OF_SECOND) && temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0;
return temporal.isSupported(ChronoField.INSTANT_SECONDS) && temporal.isSupported(ChronoField.NANO_OF_SECOND)
&& temporal.getLong(ChronoField.NANO_OF_SECOND) % 1_000_000 != 0;
}
@Override
public long getFrom(TemporalAccessor temporal) {
@ -156,9 +163,20 @@ class EpochTime {
builder -> builder.parseDefaulting(ChronoField.NANO_OF_SECOND, 999_999_999L),
SECONDS_FORMATTER1, SECONDS_FORMATTER2, SECONDS_FORMATTER3);
static final DateFormatter MILLIS_FORMATTER = new JavaDateFormatter("epoch_millis", MILLISECONDS_FORMATTER3,
builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L),
MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3);
static final DateFormatter MILLIS_FORMATTER = getEpochMillisFormatter();
private static DateFormatter getEpochMillisFormatter() {
// the third formatter fails under java 8 as a printer, so fall back to this one
final DateTimeFormatter printer;
if (JavaVersion.current().getVersion().get(0) == 8) {
printer = MILLISECONDS_FORMATTER1;
} else {
printer = MILLISECONDS_FORMATTER3;
}
return new JavaDateFormatter("epoch_millis", printer,
builder -> builder.parseDefaulting(EpochTime.NANOS_OF_MILLI, 999_999L),
MILLISECONDS_FORMATTER1, MILLISECONDS_FORMATTER2, MILLISECONDS_FORMATTER3);
}
private abstract static class EpochField implements TemporalField {

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.Strings;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalField;
@ -76,6 +77,8 @@ class JavaDateFormatter implements DateFormatter {
if (distinctLocales > 1) {
throw new IllegalArgumentException("formatters must have the same locale");
}
this.printer = printer;
this.format = format;
if (parsers.length == 0) {
this.parser = printer;
} else if (parsers.length == 1) {
@ -87,11 +90,11 @@ class JavaDateFormatter implements DateFormatter {
}
this.parser = builder.toFormatter(Locale.ROOT);
}
this.format = format;
this.printer = printer;
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
builder.append(this.parser);
if (format.contains("||") == false) {
builder.append(this.parser);
}
roundupParserConsumer.accept(builder);
DateTimeFormatter roundupFormatter = builder.toFormatter(parser.getLocale());
if (printer.getZone() != null) {
@ -117,7 +120,12 @@ class JavaDateFormatter implements DateFormatter {
if (Strings.isNullOrEmpty(input)) {
throw new IllegalArgumentException("cannot parse empty date");
}
return parser.parse(input);
try {
return parser.parse(input);
} catch (DateTimeParseException e) {
throw new IllegalArgumentException("failed to parse date field [" + input + "] with format [" + format + "]", e);
}
}
@Override
@ -162,7 +170,7 @@ class JavaDateFormatter implements DateFormatter {
@Override
public DateMathParser toDateMathParser() {
return new JavaDateMathParser(parser, roundupParser);
return new JavaDateMathParser(format, parser, roundupParser);
}
@Override

View File

@ -22,7 +22,6 @@ package org.elasticsearch.common.time;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import java.time.DateTimeException;
import java.time.DayOfWeek;
import java.time.Instant;
import java.time.LocalTime;
@ -30,6 +29,7 @@ import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAdjusters;
@ -48,20 +48,23 @@ public class JavaDateMathParser implements DateMathParser {
private final DateTimeFormatter formatter;
private final DateTimeFormatter roundUpFormatter;
private final String format;
public JavaDateMathParser(DateTimeFormatter formatter, DateTimeFormatter roundUpFormatter) {
JavaDateMathParser(String format, DateTimeFormatter formatter, DateTimeFormatter roundUpFormatter) {
this.format = format;
Objects.requireNonNull(formatter);
this.formatter = formatter;
this.roundUpFormatter = roundUpFormatter;
}
@Override
public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) {
long time;
public Instant parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) {
Instant time;
String mathString;
if (text.startsWith("now")) {
try {
time = now.getAsLong();
// TODO only millisecond granularity here!
time = Instant.ofEpochMilli(now.getAsLong());
} catch (Exception e) {
throw new ElasticsearchParseException("could not read the current timestamp", e);
}
@ -78,12 +81,12 @@ public class JavaDateMathParser implements DateMathParser {
return parseMath(mathString, time, roundUp, timeZone);
}
private long parseMath(final String mathString, final long time, final boolean roundUp,
private Instant parseMath(final String mathString, final Instant time, final boolean roundUp,
ZoneId timeZone) throws ElasticsearchParseException {
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone);
ZonedDateTime dateTime = ZonedDateTime.ofInstant(time, timeZone);
for (int i = 0; i < mathString.length(); ) {
char c = mathString.charAt(i++);
final boolean round;
@ -204,18 +207,18 @@ public class JavaDateMathParser implements DateMathParser {
dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit());
}
}
return dateTime.toInstant().toEpochMilli();
return dateTime.toInstant();
}
private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
if (Strings.isNullOrEmpty(value)) {
throw new IllegalArgumentException("cannot parse empty date");
throw new ElasticsearchParseException("cannot parse empty date");
}
DateTimeFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter;
try {
if (timeZone == null) {
return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli();
return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant();
} else {
TemporalAccessor accessor = formatter.parse(value);
ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor);
@ -223,10 +226,11 @@ public class JavaDateMathParser implements DateMathParser {
timeZone = zoneId;
}
return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli();
return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant();
}
} catch (IllegalArgumentException | DateTimeException e) {
throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage());
} catch (DateTimeParseException e) {
throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]: [{}]",
e, value, format, e.getMessage());
}
}
}

View File

@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.script.JodaCompatibleZonedDateTime;
@ -65,9 +64,9 @@ import java.util.function.Function;
public class XContentElasticsearchExtension implements XContentBuilderExtension {
public static final DateTimeFormatter DEFAULT_DATE_PRINTER = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
public static final DateFormatter DEFAULT_FORMATTER = DateFormatters.forPattern("strict_date_optional_time_nanos");
public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSS");
public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatters.forPattern("HH:mm:ss.SSSZZZZZ");
public static final DateFormatter DEFAULT_FORMATTER = DateFormatter.forPattern("strict_date_optional_time_nanos");
public static final DateFormatter LOCAL_TIME_FORMATTER = DateFormatter.forPattern("HH:mm:ss.SSS");
public static final DateFormatter OFFSET_TIME_FORMATTER = DateFormatter.forPattern("HH:mm:ss.SSSZZZZZ");
@Override
public Map<Class<?>, XContentBuilder.Writer> getXContentWriters() {

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@ -41,9 +40,9 @@ import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Base64;
import java.util.List;
import java.util.Map;
@ -108,7 +107,7 @@ public class BinaryFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
return DocValueFormat.BINARY;
}

View File

@ -40,9 +40,9 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@ -190,7 +190,7 @@ public class BooleanFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}

View File

@ -33,13 +33,15 @@ import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@ -49,18 +51,17 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
/** A {@link FieldMapper} for ip addresses. */
/** A {@link FieldMapper} for dates. */
public class DateFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "date";
@ -73,8 +74,8 @@ public class DateFieldMapper extends FieldMapper {
public static class Builder extends FieldMapper.Builder<Builder, DateFieldMapper> {
private Boolean ignoreMalformed;
private Explicit<String> format = new Explicit<>(DEFAULT_DATE_TIME_FORMATTER.pattern(), false);
private Locale locale;
private boolean dateTimeFormatterSet = false;
public Builder(String name) {
super(name, new DateFieldType(), new DateFieldType());
@ -102,27 +103,37 @@ public class DateFieldMapper extends FieldMapper {
return Defaults.IGNORE_MALFORMED;
}
/** Whether an explicit format for this date field has been set already. */
public boolean isDateTimeFormatterSet() {
return dateTimeFormatterSet;
}
public Builder dateTimeFormatter(DateFormatter dateTimeFormatter) {
fieldType().setDateTimeFormatter(dateTimeFormatter);
dateTimeFormatterSet = true;
public Builder locale(Locale locale) {
this.locale = locale;
return this;
}
public void locale(Locale locale) {
this.locale = locale;
public Locale locale() {
return locale;
}
public String format() {
return format.value();
}
public Builder format(String format) {
this.format = new Explicit<>(format, true);
return this;
}
public boolean isFormatterSet() {
return format.explicit();
}
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
String pattern = this.format.value();
DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
if (!locale.equals(dateTimeFormatter.locale())) {
fieldType().setDateTimeFormatter(dateTimeFormatter.withLocale(locale));
boolean hasPatternChanged = Strings.hasLength(pattern) && Objects.equals(pattern, dateTimeFormatter.pattern()) == false;
if (hasPatternChanged || Objects.equals(builder.locale, dateTimeFormatter.locale()) == false) {
fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale));
}
}
@ -160,7 +171,7 @@ public class DateFieldMapper extends FieldMapper {
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
} else if (propName.equals("format")) {
builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
builder.format(propNode.toString());
iterator.remove();
} else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@ -196,13 +207,12 @@ public class DateFieldMapper extends FieldMapper {
public boolean equals(Object o) {
if (!super.equals(o)) return false;
DateFieldType that = (DateFieldType) o;
return Objects.equals(dateTimeFormatter.pattern(), that.dateTimeFormatter.pattern()) &&
Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale());
return Objects.equals(dateTimeFormatter, that.dateTimeFormatter);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), dateTimeFormatter.pattern(), dateTimeFormatter.locale());
return Objects.hash(super.hashCode(), dateTimeFormatter);
}
@Override
@ -214,10 +224,10 @@ public class DateFieldMapper extends FieldMapper {
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
DateFieldType other = (DateFieldType) fieldType;
if (Objects.equals(dateTimeFormatter().pattern(), other.dateTimeFormatter().pattern()) == false) {
if (Objects.equals(dateTimeFormatter.pattern(), other.dateTimeFormatter.pattern()) == false) {
conflicts.add("mapper [" + name() + "] has different [format] values");
}
if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
if (Objects.equals(dateTimeFormatter.locale(), other.dateTimeFormatter.locale()) == false) {
conflicts.add("mapper [" + name() + "] has different [locale] values");
}
}
@ -226,9 +236,9 @@ public class DateFieldMapper extends FieldMapper {
return dateTimeFormatter;
}
public void setDateTimeFormatter(DateFormatter dateTimeFormatter) {
void setDateTimeFormatter(DateFormatter formatter) {
checkIfFrozen();
this.dateTimeFormatter = dateTimeFormatter;
this.dateTimeFormatter = formatter;
this.dateMathParser = dateTimeFormatter.toDateMathParser();
}
@ -237,7 +247,7 @@ public class DateFieldMapper extends FieldMapper {
}
long parse(String value) {
return dateTimeFormatter().parseMillis(value);
return DateFormatters.toZonedDateTime(dateTimeFormatter().parse(value)).toInstant().toEpochMilli();
}
@Override
@ -260,7 +270,7 @@ public class DateFieldMapper extends FieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation,
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
@Nullable ZoneId timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
failIfNotIndexed();
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
@ -294,8 +304,8 @@ public class DateFieldMapper extends FieldMapper {
return query;
}
public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTimeZone zone,
@Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
public long parseToMilliseconds(Object value, boolean roundUp,
@Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
DateMathParser dateParser = dateMathParser();
if (forcedDateParser != null) {
dateParser = forcedDateParser;
@ -307,13 +317,13 @@ public class DateFieldMapper extends FieldMapper {
} else {
strValue = value.toString();
}
return dateParser.parse(strValue, context::nowInMillis, roundUp, DateUtils.dateTimeZoneToZoneId(zone));
return dateParser.parse(strValue, context::nowInMillis, roundUp, zone).toEpochMilli();
}
@Override
public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper,
DateTimeZone timeZone, DateMathParser dateParser,
QueryRewriteContext context) throws IOException {
public Relation isFieldWithinQuery(IndexReader reader,
Object from, Object to, boolean includeLower, boolean includeUpper,
ZoneId timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
if (dateParser == null) {
dateParser = this.dateMathParser;
}
@ -376,13 +386,13 @@ public class DateFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
DateFormatter dateTimeFormatter = this.dateTimeFormatter;
if (format != null) {
dateTimeFormatter = DateFormatter.forPattern(format);
dateTimeFormatter = DateFormatter.forPattern(format).withLocale(dateTimeFormatter.locale());
}
if (timeZone == null) {
timeZone = DateTimeZone.UTC;
timeZone = ZoneOffset.UTC;
}
return new DocValueFormat.DateTime(dateTimeFormatter, timeZone);
}
@ -442,7 +452,7 @@ public class DateFieldMapper extends FieldMapper {
long timestamp;
try {
timestamp = fieldType().parse(dateAsString);
} catch (IllegalArgumentException e) {
} catch (IllegalArgumentException | ElasticsearchParseException e) {
if (ignoreMalformed.value()) {
context.addIgnoredField(fieldType.name());
return;
@ -489,8 +499,9 @@ public class DateFieldMapper extends FieldMapper {
|| fieldType().dateTimeFormatter().pattern().equals(DEFAULT_DATE_TIME_FORMATTER.pattern()) == false) {
builder.field("format", fieldType().dateTimeFormatter().pattern());
}
if (includeDefaults
|| fieldType().dateTimeFormatter().locale() != Locale.ROOT) {
|| fieldType().dateTimeFormatter().locale().equals(DEFAULT_DATE_TIME_FORMATTER.locale()) == false) {
builder.field("locale", fieldType().dateTimeFormatter().locale());
}
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
@ -35,6 +36,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import java.io.IOException;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@ -672,7 +674,7 @@ final class DocumentParser {
private static Mapper.Builder<?, ?> newDateBuilder(String name, DateFormatter dateTimeFormatter, Version indexCreated) {
DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name);
if (dateTimeFormatter != null) {
builder.dateTimeFormatter(dateTimeFormatter);
builder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.locale());
}
return builder;
}
@ -717,8 +719,8 @@ final class DocumentParser {
// `epoch_millis` or `YYYY`
for (DateFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
dateTimeFormatter.parseMillis(text);
} catch (IllegalArgumentException e) {
dateTimeFormatter.parse(text);
} catch (ElasticsearchParseException | DateTimeParseException | IllegalArgumentException e) {
// failure to parse this, continue
continue;
}
@ -728,8 +730,8 @@ final class DocumentParser {
}
if (builder instanceof DateFieldMapper.Builder) {
DateFieldMapper.Builder dateBuilder = (DateFieldMapper.Builder) builder;
if (dateBuilder.isDateTimeFormatterSet() == false) {
dateBuilder.dateTimeFormatter(dateTimeFormatter);
if (dateBuilder.isFormatterSet() == false) {
dateBuilder.format(dateTimeFormatter.pattern()).locale(dateTimeFormatter.locale());
}
}
return builder;

View File

@ -44,10 +44,10 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
@ -303,7 +303,7 @@ public class IpFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}

View File

@ -50,9 +50,9 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.List;
import java.util.Objects;
@ -335,10 +335,10 @@ public abstract class MappedFieldType extends FieldType {
* @param relation the relation, nulls should be interpreted like INTERSECTS
*/
public Query rangeQuery(
Object lowerTerm, Object upperTerm,
boolean includeLower, boolean includeUpper,
ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser,
QueryShardContext context) {
Object lowerTerm, Object upperTerm,
boolean includeLower, boolean includeUpper,
ShapeRelation relation, ZoneId timeZone, DateMathParser parser,
QueryShardContext context) {
throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
}
@ -413,7 +413,7 @@ public abstract class MappedFieldType extends FieldType {
IndexReader reader,
Object from, Object to,
boolean includeLower, boolean includeUpper,
DateTimeZone timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
ZoneId timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
return Relation.INTERSECTS;
}
@ -448,7 +448,7 @@ public abstract class MappedFieldType extends FieldType {
/** Return a {@link DocValueFormat} that can be used to display and parse
* values as returned by the fielddata API.
* The default implementation returns a {@link DocValueFormat#RAW}. */
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats");
}

View File

@ -53,9 +53,9 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
@ -961,7 +961,7 @@ public class NumberFieldMapper extends FieldMapper {
}
@Override
public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) {
public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
if (timeZone != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
+ "] does not support custom time zones");

View File

@ -42,6 +42,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.network.InetAddresses;
@ -49,19 +50,18 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.query.QueryShardContext;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
@ -71,7 +71,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD;
import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD;
@ -92,12 +91,12 @@ public class RangeFieldMapper extends FieldMapper {
public static class Builder extends FieldMapper.Builder<Builder, RangeFieldMapper> {
private Boolean coerce;
private Locale locale;
private Locale locale = Locale.ROOT;
private String pattern;
public Builder(String name, RangeType type) {
super(name, new RangeFieldType(type), new RangeFieldType(type));
builder = this;
locale = Locale.ROOT;
}
@Override
@ -128,8 +127,8 @@ public class RangeFieldMapper extends FieldMapper {
return Defaults.COERCE;
}
public Builder dateTimeFormatter(DateFormatter dateTimeFormatter) {
fieldType().setDateTimeFormatter(dateTimeFormatter);
public Builder format(String format) {
this.pattern = format;
return this;
}
@ -145,12 +144,15 @@ public class RangeFieldMapper extends FieldMapper {
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
DateFormatter dateTimeFormatter = fieldType().dateTimeFormatter;
DateFormatter formatter = fieldType().dateTimeFormatter;
if (fieldType().rangeType == RangeType.DATE) {
if (!locale.equals(dateTimeFormatter.locale())) {
fieldType().setDateTimeFormatter(dateTimeFormatter.withLocale(locale));
boolean hasPatternChanged = Strings.hasLength(builder.pattern) &&
Objects.equals(builder.pattern, formatter.pattern()) == false;
if (hasPatternChanged || Objects.equals(builder.locale, formatter.locale()) == false) {
fieldType().setDateTimeFormatter(DateFormatter.forPattern(pattern).withLocale(locale));
}
} else if (dateTimeFormatter != null) {
} else if (pattern != null) {
throw new IllegalArgumentException("field [" + name() + "] of type [" + fieldType().rangeType
+ "] should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type");
}
@ -190,7 +192,7 @@ public class RangeFieldMapper extends FieldMapper {
builder.locale(LocaleUtils.parse(propNode.toString()));
iterator.remove();
} else if (propName.equals("format")) {
builder.dateTimeFormatter(parseDateTimeFormatter(propNode));
builder.format(propNode.toString());
iterator.remove();
} else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
@ -219,8 +221,8 @@ public class RangeFieldMapper extends FieldMapper {
RangeFieldType(RangeFieldType other) {
super(other);
this.rangeType = other.rangeType;
if (other.dateTimeFormatter() != null) {
setDateTimeFormatter(other.dateTimeFormatter);
if (other.rangeType == RangeType.DATE && other.dateTimeFormatter() != null) {
setDateTimeFormatter(other.dateTimeFormatter());
}
}
@ -235,15 +237,13 @@ public class RangeFieldMapper extends FieldMapper {
RangeFieldType that = (RangeFieldType) o;
return Objects.equals(rangeType, that.rangeType) &&
(rangeType == RangeType.DATE) ?
Objects.equals(dateTimeFormatter.pattern(), that.dateTimeFormatter.pattern())
&& Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale())
Objects.equals(dateTimeFormatter, that.dateTimeFormatter)
: dateTimeFormatter == null && that.dateTimeFormatter == null;
}
@Override
public int hashCode() {
return (dateTimeFormatter == null) ? Objects.hash(super.hashCode(), rangeType)
: Objects.hash(super.hashCode(), rangeType, dateTimeFormatter.pattern(), dateTimeFormatter.locale());
return Objects.hash(super.hashCode(), rangeType, dateTimeFormatter);
}
@Override
@ -285,7 +285,7 @@ public class RangeFieldMapper extends FieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) {
failIfNotIndexed();
if (parser == null) {
parser = dateMathParser();
@ -543,7 +543,8 @@ public class RangeFieldMapper extends FieldMapper {
return new LongRange(name, new long[] {((Number)r.from).longValue()}, new long[] {((Number)r.to).longValue()});
}
private Number parse(DateMathParser dateMathParser, String dateStr) {
return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");});
return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");})
.toEpochMilli();
}
@Override
public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included)
@ -586,18 +587,18 @@ public class RangeFieldMapper extends FieldMapper {
@Override
public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Object upperTerm, boolean includeLower,
boolean includeUpper, ShapeRelation relation, @Nullable DateTimeZone timeZone,
boolean includeUpper, ShapeRelation relation, @Nullable ZoneId timeZone,
@Nullable DateMathParser parser, QueryShardContext context) {
DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone;
ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(zone);
ZoneId zone = (timeZone == null) ? ZoneOffset.UTC : timeZone;
DateMathParser dateMathParser = (parser == null) ?
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser;
Long low = lowerTerm == null ? Long.MIN_VALUE :
dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(),
context::nowInMillis, false, zoneId);
context::nowInMillis, false, zone).toEpochMilli();
Long high = upperTerm == null ? Long.MAX_VALUE :
dateMathParser.parse(upperTerm instanceof BytesRef ? ((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(),
context::nowInMillis, false, zoneId);
context::nowInMillis, false, zone).toEpochMilli();
return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone,
dateMathParser, context);
@ -910,7 +911,7 @@ public class RangeFieldMapper extends FieldMapper {
return numberType.parse(value, coerce);
}
public Query rangeQuery(String field, boolean hasDocValues, Object from, Object to, boolean includeFrom, boolean includeTo,
ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser dateMathParser,
ShapeRelation relation, @Nullable ZoneId timeZone, @Nullable DateMathParser dateMathParser,
QueryShardContext context) {
Object lower = from == null ? minValue() : parse(from, false);
Object upper = to == null ? maxValue() : parse(to, false);

View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.ToXContent;
@ -46,7 +45,7 @@ public class RootObjectMapper extends ObjectMapper {
public static final DateFormatter[] DYNAMIC_DATE_TIME_FORMATTERS =
new DateFormatter[]{
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
Joda.getStrictStandardDateFormatter()
DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis")
};
public static final boolean DATE_DETECTION = true;
public static final boolean NUMERIC_DETECTION = false;
@ -55,8 +54,7 @@ public class RootObjectMapper extends ObjectMapper {
public static class Builder extends ObjectMapper.Builder<Builder, RootObjectMapper> {
protected Explicit<DynamicTemplate[]> dynamicTemplates = new Explicit<>(new DynamicTemplate[0], false);
protected Explicit<DateFormatter[]> dynamicDateTimeFormatters =
new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
protected Explicit<DateFormatter[]> dynamicDateTimeFormatters = new Explicit<>(Defaults.DYNAMIC_DATE_TIME_FORMATTERS, false);
protected Explicit<Boolean> dateDetection = new Explicit<>(Defaults.DATE_DETECTION, false);
protected Explicit<Boolean> numericDetection = new Explicit<>(Defaults.NUMERIC_DETECTION, false);

View File

@ -23,7 +23,8 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.joda.time.DateTimeZone;
import java.time.ZoneId;
/**
* {@link MappedFieldType} base impl for field types that are neither dates nor ranges.
@ -40,7 +41,7 @@ public abstract class SimpleMappedFieldType extends MappedFieldType {
@Override
public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) {
ShapeRelation relation, ZoneId timeZone, DateMathParser parser, QueryShardContext context) {
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
"] does not support DISJOINT ranges");
@ -52,7 +53,7 @@ public abstract class SimpleMappedFieldType extends MappedFieldType {
}
/**
* Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, DateTimeZone, DateMathParser, QueryShardContext)}
* Same as {@link #rangeQuery(Object, Object, boolean, boolean, ShapeRelation, ZoneId, DateMathParser, QueryShardContext)}
* but without the trouble of relations or date-specific options.
*/
protected Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,

View File

@ -43,7 +43,6 @@ public class TypeParsers {
public static final String INDEX_OPTIONS_POSITIONS = "positions";
public static final String INDEX_OPTIONS_OFFSETS = "offsets";
private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map<String, Object> fieldNode,
Mapper.TypeParser.ParserContext parserContext) {
NamedAnalyzer indexAnalyzer = null;

View File

@ -38,9 +38,9 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.QueryParserHelper;
import org.elasticsearch.index.search.QueryStringQueryParser;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
@ -144,7 +144,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
private Boolean lenient;
private DateTimeZone timeZone;
private ZoneId timeZone;
/** To limit effort spent determinizing regexp queries. */
private int maxDeterminizedStates = DEFAULT_MAX_DETERMINED_STATES;
@ -189,7 +189,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
rewrite = in.readOptionalString();
minimumShouldMatch = in.readOptionalString();
lenient = in.readOptionalBoolean();
timeZone = in.readOptionalTimeZone();
timeZone = in.readOptionalZoneId();
escape = in.readBoolean();
maxDeterminizedStates = in.readVInt();
if (in.getVersion().onOrAfter(Version.V_6_1_0)) {
@ -224,7 +224,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
out.writeOptionalString(this.rewrite);
out.writeOptionalString(this.minimumShouldMatch);
out.writeOptionalBoolean(this.lenient);
out.writeOptionalTimeZone(timeZone);
out.writeOptionalZoneId(timeZone);
out.writeBoolean(this.escape);
out.writeVInt(this.maxDeterminizedStates);
if (out.getVersion().onOrAfter(Version.V_6_1_0)) {
@ -510,19 +510,19 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
*/
public QueryStringQueryBuilder timeZone(String timeZone) {
if (timeZone != null) {
this.timeZone = DateTimeZone.forID(timeZone);
this.timeZone = ZoneId.of(timeZone);
} else {
this.timeZone = null;
}
return this;
}
public QueryStringQueryBuilder timeZone(DateTimeZone timeZone) {
public QueryStringQueryBuilder timeZone(ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}
public DateTimeZone timeZone() {
public ZoneId timeZone() {
return this.timeZone;
}
@ -621,7 +621,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
builder.field(LENIENT_FIELD.getPreferredName(), this.lenient);
}
if (this.timeZone != null) {
builder.field(TIME_ZONE_FIELD.getPreferredName(), this.timeZone.getID());
builder.field(TIME_ZONE_FIELD.getPreferredName(), this.timeZone.getId());
}
builder.field(ESCAPE_FIELD.getPreferredName(), this.escape);
builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery);
@ -810,8 +810,8 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
Objects.equals(lenient, other.lenient) &&
Objects.equals(
timeZone == null ? null : timeZone.getID(),
other.timeZone == null ? null : other.timeZone.getID()) &&
timeZone == null ? null : timeZone.getId(),
other.timeZone == null ? null : other.timeZone.getId()) &&
Objects.equals(escape, other.escape) &&
Objects.equals(maxDeterminizedStates, other.maxDeterminizedStates) &&
Objects.equals(autoGenerateSynonymsPhraseQuery, other.autoGenerateSynonymsPhraseQuery) &&
@ -824,7 +824,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue
quoteFieldSuffix, allowLeadingWildcard, analyzeWildcard,
enablePositionIncrements, fuzziness, fuzzyPrefixLength,
fuzzyMaxExpansions, fuzzyRewrite, phraseSlop, type, tieBreaker, rewrite, minimumShouldMatch, lenient,
timeZone == null ? 0 : timeZone.getID(), escape, maxDeterminizedStates, autoGenerateSynonymsPhraseQuery,
timeZone == null ? 0 : timeZone.getId(), escape, maxDeterminizedStates, autoGenerateSynonymsPhraseQuery,
fuzzyTranspositions);
}

View File

@ -37,9 +37,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.DateTimeException;
import java.time.ZoneId;
import java.util.Objects;
/**
@ -64,19 +65,12 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
private static final ParseField RELATION_FIELD = new ParseField("relation");
private final String fieldName;
private Object from;
private Object to;
private DateTimeZone timeZone;
private ZoneId timeZone;
private boolean includeLower = DEFAULT_INCLUDE_LOWER;
private boolean includeUpper = DEFAULT_INCLUDE_UPPER;
private DateFormatter format;
private String format;
private ShapeRelation relation;
/**
@ -101,11 +95,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
to = in.readGenericValue();
includeLower = in.readBoolean();
includeUpper = in.readBoolean();
timeZone = in.readOptionalTimeZone();
String formatString = in.readOptionalString();
if (formatString != null) {
format = DateFormatter.forPattern(formatString);
}
timeZone = in.readOptionalZoneId();
format = in.readOptionalString();
String relationString = in.readOptionalString();
if (relationString != null) {
relation = ShapeRelation.getRelationByName(relationString);
@ -129,12 +120,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
out.writeGenericValue(this.to);
out.writeBoolean(this.includeLower);
out.writeBoolean(this.includeUpper);
out.writeOptionalTimeZone(timeZone);
String formatString = null;
if (this.format != null) {
formatString = this.format.pattern();
}
out.writeOptionalString(formatString);
out.writeOptionalZoneId(timeZone);
out.writeOptionalString(format);
String relationString = null;
if (this.relation != null) {
relationString = this.relation.getRelationName();
@ -267,7 +254,11 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (timeZone == null) {
throw new IllegalArgumentException("timezone cannot be null");
}
this.timeZone = DateTimeZone.forID(timeZone);
try {
this.timeZone = ZoneId.of(timeZone);
} catch (DateTimeException e) {
throw new IllegalArgumentException(e);
}
return this;
}
@ -275,10 +266,10 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
* In case of date field, gets the from/to fields timezone adjustment
*/
public String timeZone() {
return this.timeZone == null ? null : this.timeZone.getID();
return this.timeZone == null ? null : this.timeZone.getId();
}
DateTimeZone getDateTimeZone() { // for testing
ZoneId getDateTimeZone() { // for testing
return timeZone;
}
@ -289,7 +280,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
if (format == null) {
throw new IllegalArgumentException("format cannot be null");
}
this.format = DateFormatter.forPattern(format);
// this just ensure that the pattern is actually valid, no need to keep it here
DateFormatter.forPattern(format);
this.format = format;
return this;
}
@ -297,12 +290,12 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
* Gets the format field to parse the from/to fields
*/
public String format() {
return this.format == null ? null : this.format.pattern();
return format;
}
DateMathParser getForceDateParser() { // pkg private for testing
if (this.format != null) {
return this.format.toDateMathParser();
if (Strings.hasText(format)) {
return DateFormatter.forPattern(this.format).toDateMathParser();
}
return null;
}
@ -334,10 +327,10 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower);
builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper);
if (timeZone != null) {
builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getID());
builder.field(TIME_ZONE_FIELD.getPreferredName(), timeZone.getId());
}
if (format != null) {
builder.field(FORMAT_FIELD.getPreferredName(), format.pattern());
if (Strings.hasText(format)) {
builder.field(FORMAT_FIELD.getPreferredName(), format);
}
if (relation != null) {
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
@ -531,21 +524,17 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
@Override
protected int doHashCode() {
String timeZoneId = timeZone == null ? null : timeZone.getID();
String formatString = format == null ? null : format.pattern();
return Objects.hash(fieldName, from, to, timeZoneId, includeLower, includeUpper, formatString);
return Objects.hash(fieldName, from, to, timeZone, includeLower, includeUpper, format);
}
@Override
protected boolean doEquals(RangeQueryBuilder other) {
String timeZoneId = timeZone == null ? null : timeZone.getID();
String formatString = format == null ? null : format.pattern();
return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(from, other.from) &&
Objects.equals(to, other.to) &&
Objects.equals(timeZoneId, other.timeZone()) &&
Objects.equals(timeZone, other.timeZone) &&
Objects.equals(includeLower, other.includeLower) &&
Objects.equals(includeUpper, other.includeUpper) &&
Objects.equals(formatString, other.format());
Objects.equals(format, other.format);
}
}

View File

@ -54,9 +54,9 @@ import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@ -89,7 +89,7 @@ public class QueryStringQueryParser extends XQueryParser {
private Analyzer forceQuoteAnalyzer;
private String quoteFieldSuffix;
private boolean analyzeWildcard;
private DateTimeZone timeZone;
private ZoneId timeZone;
private Fuzziness fuzziness = Fuzziness.AUTO;
private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions;
private MappedFieldType currentFieldType;
@ -227,7 +227,7 @@ public class QueryStringQueryParser extends XQueryParser {
/**
* @param timeZone Time Zone to be applied to any range query related to dates.
*/
public void setTimeZone(DateTimeZone timeZone) {
public void setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
}

View File

@ -19,9 +19,6 @@
package org.elasticsearch.ingest;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
@ -37,12 +34,15 @@ import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Represents a single document being captured before indexing and holds the source and metadata (like id, type and index).

View File

@ -22,7 +22,6 @@ package org.elasticsearch.monitor.jvm;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import java.lang.management.ManagementFactory;
@ -43,7 +42,7 @@ public class HotThreads {
private static final Object mutex = new Object();
private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime");
private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("dateOptionalTime");
private int busiestThreads = 3;
private TimeValue interval = new TimeValue(500, TimeUnit.MILLISECONDS);

View File

@ -39,7 +39,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.rest.RestController;
@ -61,6 +61,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestIndicesAction extends AbstractCatAction {
private static final DateFormatter STRICT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time");
private final IndexNameExpressionResolver indexNameExpressionResolver;
public RestIndicesAction(Settings settings, RestController controller, IndexNameExpressionResolver indexNameExpressionResolver) {
@ -432,7 +433,7 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell(indexMetaData.getCreationDate());
ZonedDateTime creationTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC);
table.addCell(DateFormatters.forPattern("strict_date_time").format(creationTime));
table.addCell(STRICT_DATE_TIME_FORMATTER.format(creationTime));
table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());

View File

@ -26,7 +26,6 @@ import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@ -99,7 +98,7 @@ public class RestSnapshotAction extends AbstractCatAction {
.endHeaders();
}
private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private Table buildTable(RestRequest req, GetSnapshotsResponse getSnapshotsResponse) {
Table table = getTableWithHeader(req);

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@ -125,7 +124,7 @@ public class RestTasksAction extends AbstractCatAction {
return table;
}
private static final DateFormatter FORMATTER = DateFormatters.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private static final DateFormatter FORMATTER = DateFormatter.forPattern("HH:mm:ss").withZone(ZoneOffset.UTC);
private void buildRow(Table table, boolean fullId, boolean detailed, DiscoveryNodes discoveryNodes, TaskInfo taskInfo) {
table.startRow();

View File

@ -23,7 +23,6 @@ import org.apache.logging.log4j.LogManager;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateUtils;
import org.joda.time.DateTime;
@ -50,7 +49,7 @@ import java.util.Objects;
* A wrapper around ZonedDateTime that exposes joda methods for backcompat.
*/
public class JodaCompatibleZonedDateTime {
private static final DateFormatter DATE_FORMATTER = DateFormatters.forPattern("strict_date_time");
private static final DateFormatter DATE_FORMATTER = DateFormatter.forPattern("strict_date_time");
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(JodaCompatibleZonedDateTime.class));

View File

@ -212,7 +212,7 @@ public final class ScoreScriptUtils {
double scaling;
public DecayDateLinear(String originStr, String scaleStr, String offsetStr, double decay) {
this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
@ -235,7 +235,7 @@ public final class ScoreScriptUtils {
double scaling;
public DecayDateExp(String originStr, String scaleStr, String offsetStr, double decay) {
this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")
@ -258,7 +258,7 @@ public final class ScoreScriptUtils {
double scaling;
public DecayDateGauss(String originStr, String scaleStr, String offsetStr, double decay) {
this.origin = dateParser.parse(originStr, null, false, defaultZoneId);
this.origin = dateParser.parse(originStr, null, false, defaultZoneId).toEpochMilli();
long scale = TimeValue.parseTimeValue(scaleStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".scale")
.getMillis();
this.offset = TimeValue.parseTimeValue(offsetStr, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset")

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
@ -30,7 +31,6 @@ import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.net.InetAddress;
@ -38,6 +38,7 @@ import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Base64;
@ -164,20 +165,24 @@ public interface DocValueFormat extends NamedWriteable {
public static final String NAME = "date_time";
final DateFormatter formatter;
// TODO: change this to ZoneId, but will require careful change to serialization
final DateTimeZone timeZone;
private final ZoneId zoneId;
final ZoneId timeZone;
private final DateMathParser parser;
public DateTime(DateFormatter formatter, DateTimeZone timeZone) {
this.formatter = Objects.requireNonNull(formatter);
public DateTime(DateFormatter formatter, ZoneId timeZone) {
this.formatter = formatter;
this.timeZone = Objects.requireNonNull(timeZone);
this.zoneId = DateUtils.dateTimeZoneToZoneId(timeZone);
this.parser = formatter.toDateMathParser();
}
public DateTime(StreamInput in) throws IOException {
this(DateFormatter.forPattern(in.readString()), DateTimeZone.forID(in.readString()));
this.formatter = DateFormatter.forPattern(in.readString());
this.parser = formatter.toDateMathParser();
String zoneId = in.readString();
if (in.getVersion().before(Version.V_7_0_0)) {
this.timeZone = DateUtils.of(zoneId);
} else {
this.timeZone = ZoneId.of(zoneId);
}
}
@Override
@ -188,12 +193,16 @@ public interface DocValueFormat extends NamedWriteable {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(formatter.pattern());
out.writeString(timeZone.getID());
if (out.getVersion().before(Version.V_7_0_0)) {
out.writeString(DateUtils.zoneIdToDateTimeZone(timeZone).getID());
} else {
out.writeString(timeZone.getId());
}
}
@Override
public String format(long value) {
return formatter.withZone(zoneId).formatMillis(value);
return formatter.format(Instant.ofEpochMilli(value).atZone(timeZone));
}
@Override
@ -203,7 +212,7 @@ public interface DocValueFormat extends NamedWriteable {
@Override
public long parseLong(String value, boolean roundUp, LongSupplier now) {
return parser.parse(value, now, roundUp, DateUtils.dateTimeZoneToZoneId(timeZone));
return parser.parse(value, now, roundUp, timeZone).toEpochMilli();
}
@Override

View File

@ -19,11 +19,12 @@
package org.elasticsearch.search.aggregations.bucket.composite;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -37,9 +38,10 @@ import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Objects;
import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS;
@ -70,9 +72,9 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
}, Histogram.INTERVAL_FIELD, ObjectParser.ValueType.LONG);
PARSER.declareField(DateHistogramValuesSourceBuilder::timeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return DateTimeZone.forID(p.text());
return ZoneId.of(p.text());
} else {
return DateTimeZone.forOffsetHours(p.intValue());
return ZoneOffset.ofHours(p.intValue());
}
}, new ParseField("time_zone"), ObjectParser.ValueType.LONG);
CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC);
@ -82,7 +84,7 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
}
private long interval = 0;
private DateTimeZone timeZone = null;
private ZoneId timeZone = null;
private DateHistogramInterval dateHistogramInterval;
public DateHistogramValuesSourceBuilder(String name) {
@ -93,8 +95,10 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
super(in);
this.interval = in.readLong();
this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
if (in.readBoolean()) {
timeZone = DateTimeZone.forID(in.readString());
if (in.getVersion().before(Version.V_7_0_0)) {
this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
} else {
this.timeZone = in.readOptionalZoneId();
}
}
@ -102,10 +106,10 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeLong(interval);
out.writeOptionalWriteable(dateHistogramInterval);
boolean hasTimeZone = timeZone != null;
out.writeBoolean(hasTimeZone);
if (hasTimeZone) {
out.writeString(timeZone.getID());
if (out.getVersion().before(Version.V_7_0_0)) {
out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
} else {
out.writeOptionalZoneId(timeZone);
}
}
@ -176,7 +180,7 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
/**
* Sets the time zone to use for this aggregation
*/
public DateHistogramValuesSourceBuilder timeZone(DateTimeZone timeZone) {
public DateHistogramValuesSourceBuilder timeZone(ZoneId timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
}
@ -187,14 +191,14 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuild
/**
* Gets the time zone to use for this aggregation
*/
public DateTimeZone timeZone() {
public ZoneId timeZone() {
return timeZone;
}
private Rounding createRounding() {
Rounding.Builder tzRoundingBuilder;
if (dateHistogramInterval != null) {
DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
Rounding.DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
if (dateTimeUnit != null) {
tzRoundingBuilder = Rounding.builder(dateTimeUnit);
} else {

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.composite;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.aggregations.support.ValuesSource;

View File

@ -20,11 +20,10 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -42,9 +41,9 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
@ -70,19 +69,19 @@ public class AutoDateHistogramAggregationBuilder
* The current implementation probably should not be invoked in a tight loop.
* @return Array of RoundingInfo
*/
static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
static RoundingInfo[] buildRoundings(ZoneId timeZone) {
RoundingInfo[] roundings = new RoundingInfo[6];
roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, "s" , 1, 5, 10, 30);
roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
roundings[0] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, "s", 1, 5, 10, 30);
roundings[1] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, "m", 1, 5, 10, 30);
roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, "h", 1, 3, 12);
roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
roundings[2] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, "h",1, 3, 12);
roundings[3] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.DAY_OF_MONTH, timeZone),
24 * 60 * 60 * 1000L, "d", 1, 7);
roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
roundings[4] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.MONTH_OF_YEAR, timeZone),
30 * 24 * 60 * 60 * 1000L, "M", 1, 3);
roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
roundings[5] = new RoundingInfo(createRounding(Rounding.DateTimeUnit.YEAR_OF_CENTURY, timeZone),
365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100);
return roundings;
}
@ -156,7 +155,7 @@ public class AutoDateHistogramAggregationBuilder
return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
}
static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
static Rounding createRounding(Rounding.DateTimeUnit interval, ZoneId timeZone) {
Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
if (timeZone != null) {
tzRoundingBuilder.timeZone(timeZone);
@ -196,7 +195,7 @@ public class AutoDateHistogramAggregationBuilder
}
public RoundingInfo(StreamInput in) throws IOException {
rounding = Rounding.Streams.read(in);
rounding = Rounding.read(in);
roughEstimateDurationMillis = in.readVLong();
innerIntervals = in.readIntArray();
unitAbbreviation = in.readString();
@ -204,7 +203,7 @@ public class AutoDateHistogramAggregationBuilder
@Override
public void writeTo(StreamOutput out) throws IOException {
Rounding.Streams.write(rounding, out);
rounding.writeTo(out);
out.writeVLong(roughEstimateDurationMillis);
out.writeIntArray(innerIntervals);
out.writeString(unitAbbreviation);

View File

@ -23,8 +23,8 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;

View File

@ -23,10 +23,9 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.unit.TimeValue;
@ -54,10 +53,12 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeField;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.zone.ZoneOffsetTransition;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -70,29 +71,30 @@ import static java.util.Collections.unmodifiableMap;
*/
public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, DateHistogramAggregationBuilder>
implements MultiBucketAggregationBuilder {
public static final String NAME = "date_histogram";
private static DateMathParser EPOCH_MILLIS_PARSER = DateFormatter.forPattern("epoch_millis").toDateMathParser();
public static final Map<String, DateTimeUnit> DATE_FIELD_UNITS;
public static final Map<String, Rounding.DateTimeUnit> DATE_FIELD_UNITS;
static {
Map<String, DateTimeUnit> dateFieldUnits = new HashMap<>();
dateFieldUnits.put("year", DateTimeUnit.YEAR_OF_CENTURY);
dateFieldUnits.put("1y", DateTimeUnit.YEAR_OF_CENTURY);
dateFieldUnits.put("quarter", DateTimeUnit.QUARTER);
dateFieldUnits.put("1q", DateTimeUnit.QUARTER);
dateFieldUnits.put("month", DateTimeUnit.MONTH_OF_YEAR);
dateFieldUnits.put("1M", DateTimeUnit.MONTH_OF_YEAR);
dateFieldUnits.put("week", DateTimeUnit.WEEK_OF_WEEKYEAR);
dateFieldUnits.put("1w", DateTimeUnit.WEEK_OF_WEEKYEAR);
dateFieldUnits.put("day", DateTimeUnit.DAY_OF_MONTH);
dateFieldUnits.put("1d", DateTimeUnit.DAY_OF_MONTH);
dateFieldUnits.put("hour", DateTimeUnit.HOUR_OF_DAY);
dateFieldUnits.put("1h", DateTimeUnit.HOUR_OF_DAY);
dateFieldUnits.put("minute", DateTimeUnit.MINUTES_OF_HOUR);
dateFieldUnits.put("1m", DateTimeUnit.MINUTES_OF_HOUR);
dateFieldUnits.put("second", DateTimeUnit.SECOND_OF_MINUTE);
dateFieldUnits.put("1s", DateTimeUnit.SECOND_OF_MINUTE);
Map<String, Rounding.DateTimeUnit> dateFieldUnits = new HashMap<>();
dateFieldUnits.put("year", Rounding.DateTimeUnit.YEAR_OF_CENTURY);
dateFieldUnits.put("1y", Rounding.DateTimeUnit.YEAR_OF_CENTURY);
dateFieldUnits.put("quarter", Rounding.DateTimeUnit.QUARTER_OF_YEAR);
dateFieldUnits.put("1q", Rounding.DateTimeUnit.QUARTER_OF_YEAR);
dateFieldUnits.put("month", Rounding.DateTimeUnit.MONTH_OF_YEAR);
dateFieldUnits.put("1M", Rounding.DateTimeUnit.MONTH_OF_YEAR);
dateFieldUnits.put("week", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR);
dateFieldUnits.put("1w", Rounding.DateTimeUnit.WEEK_OF_WEEKYEAR);
dateFieldUnits.put("day", Rounding.DateTimeUnit.DAY_OF_MONTH);
dateFieldUnits.put("1d", Rounding.DateTimeUnit.DAY_OF_MONTH);
dateFieldUnits.put("hour", Rounding.DateTimeUnit.HOUR_OF_DAY);
dateFieldUnits.put("1h", Rounding.DateTimeUnit.HOUR_OF_DAY);
dateFieldUnits.put("minute", Rounding.DateTimeUnit.MINUTES_OF_HOUR);
dateFieldUnits.put("1m", Rounding.DateTimeUnit.MINUTES_OF_HOUR);
dateFieldUnits.put("second", Rounding.DateTimeUnit.SECOND_OF_MINUTE);
dateFieldUnits.put("1s", Rounding.DateTimeUnit.SECOND_OF_MINUTE);
DATE_FIELD_UNITS = unmodifiableMap(dateFieldUnits);
}
@ -369,11 +371,11 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
* coordinating node in order to generate missing buckets, which may cross a transition
* even though data on the shards doesn't.
*/
DateTimeZone rewriteTimeZone(QueryShardContext context) throws IOException {
final DateTimeZone tz = timeZone();
ZoneId rewriteTimeZone(QueryShardContext context) throws IOException {
final ZoneId tz = timeZone();
if (field() != null &&
tz != null &&
tz.isFixed() == false &&
tz.getRules().isFixedOffset() == false &&
field() != null &&
script() == null) {
final MappedFieldType ft = context.fieldMapper(field());
@ -391,16 +393,29 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
}
if (anyInstant != null) {
final long prevTransition = tz.previousTransition(anyInstant);
final long nextTransition = tz.nextTransition(anyInstant);
Instant instant = Instant.ofEpochMilli(anyInstant);
ZoneOffsetTransition prevOffsetTransition = tz.getRules().previousTransition(instant);
final long prevTransition;
if (prevOffsetTransition != null) {
prevTransition = prevOffsetTransition.getInstant().toEpochMilli();
} else {
prevTransition = instant.toEpochMilli();
}
ZoneOffsetTransition nextOffsetTransition = tz.getRules().nextTransition(instant);
final long nextTransition;
if (nextOffsetTransition != null) {
nextTransition = nextOffsetTransition.getInstant().toEpochMilli();
} else {
nextTransition = instant.toEpochMilli();
}
// We need all not only values but also rounded values to be within
// [prevTransition, nextTransition].
final long low;
DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
if (intervalAsUnit != null) {
final DateTimeField dateTimeField = intervalAsUnit.field(tz);
low = dateTimeField.roundCeiling(prevTransition);
Rounding rounding = Rounding.builder(intervalAsUnit).timeZone(timeZone()).build();
low = rounding.nextRoundingValue(prevTransition);
} else {
final TimeValue intervalAsMillis = getIntervalAsTimeValue();
low = Math.addExact(prevTransition, intervalAsMillis.millis());
@ -408,12 +423,12 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
// rounding rounds down, so 'nextTransition' is a good upper bound
final long high = nextTransition;
if (ft.isFieldWithinQuery(reader, low, high, true, false, DateTimeZone.UTC, EPOCH_MILLIS_PARSER,
if (ft.isFieldWithinQuery(reader, low, high, true, false, ZoneOffset.UTC, EPOCH_MILLIS_PARSER,
context) == Relation.WITHIN) {
// All values in this reader have the same offset despite daylight saving times.
// This is very common for location-based timezones such as Europe/Paris in
// combination with time-based indices.
return DateTimeZone.forOffsetMillis(tz.getOffset(anyInstant));
return ZoneOffset.ofTotalSeconds(tz.getRules().getOffset(instant).getTotalSeconds());
}
}
}
@ -424,9 +439,9 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
@Override
protected ValuesSourceAggregatorFactory<Numeric, ?> innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
final DateTimeZone tz = timeZone();
final ZoneId tz = timeZone();
final Rounding rounding = createRounding(tz);
final DateTimeZone rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext());
final ZoneId rewrittenTimeZone = rewriteTimeZone(context.getQueryShardContext());
final Rounding shardRounding;
if (tz == rewrittenTimeZone) {
shardRounding = rounding;
@ -447,7 +462,7 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
* {@code null} then it means that the interval is expressed as a fixed
* {@link TimeValue} and may be accessed via
* {@link #getIntervalAsTimeValue()}. */
private DateTimeUnit getIntervalAsDateTimeUnit() {
private Rounding.DateTimeUnit getIntervalAsDateTimeUnit() {
if (dateHistogramInterval != null) {
return DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
}
@ -466,9 +481,9 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
}
}
private Rounding createRounding(DateTimeZone timeZone) {
private Rounding createRounding(ZoneId timeZone) {
Rounding.Builder tzRoundingBuilder;
DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
Rounding.DateTimeUnit intervalAsUnit = getIntervalAsDateTimeUnit();
if (intervalAsUnit != null) {
tzRoundingBuilder = Rounding.builder(intervalAsUnit);
} else {

View File

@ -23,8 +23,8 @@ import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;

View File

@ -21,10 +21,10 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentFragment;

View File

@ -19,9 +19,9 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
@ -32,10 +32,10 @@ import org.elasticsearch.search.aggregations.KeyComparable;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -108,7 +108,7 @@ public final class InternalAutoDateHistogram extends
@Override
public Object getKey() {
return new DateTime(key, DateTimeZone.UTC);
return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override

View File

@ -20,9 +20,9 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregations;
@ -34,10 +34,10 @@ import org.elasticsearch.search.aggregations.InternalOrder;
import org.elasticsearch.search.aggregations.KeyComparable;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@ -112,7 +112,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
@Override
public Object getKey() {
return new DateTime(key, DateTimeZone.UTC);
return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
@Override
@ -185,13 +185,13 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
}
EmptyBucketInfo(StreamInput in) throws IOException {
rounding = Rounding.Streams.read(in);
rounding = Rounding.read(in);
subAggregations = InternalAggregations.readAggregations(in);
bounds = in.readOptionalWriteable(ExtendedBounds::new);
}
void writeTo(StreamOutput out) throws IOException {
Rounding.Streams.write(rounding, out);
rounding.writeTo(out);
subAggregations.writeTo(out);
out.writeOptionalWriteable(bounds);
}

View File

@ -24,10 +24,10 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.List;
public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation<ParsedAutoDateHistogram.ParsedBucket> implements Histogram {
@ -83,7 +83,7 @@ public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation<Parsed
@Override
public Object getKey() {
if (key != null) {
return new DateTime(key, DateTimeZone.UTC);
return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
return null;
}

View File

@ -23,10 +23,10 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.List;
public class ParsedDateHistogram extends ParsedMultiBucketAggregation<ParsedDateHistogram.ParsedBucket> implements Histogram {
@ -62,7 +62,7 @@ public class ParsedDateHistogram extends ParsedMultiBucketAggregation<ParsedDate
@Override
public Object getKey() {
if (key != null) {
return new DateTime(key, DateTimeZone.UTC);
return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
}
return null;
}

View File

@ -30,9 +30,9 @@ import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTime;
import java.io.IOException;
import java.time.ZonedDateTime;
import java.util.Map;
public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
@ -224,24 +224,24 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
* @param to
* the upper bound on the dates, exclusive
*/
public DateRangeAggregationBuilder addRange(String key, DateTime from, DateTime to) {
public DateRangeAggregationBuilder addRange(String key, ZonedDateTime from, ZonedDateTime to) {
addRange(new RangeAggregator.Range(key, convertDateTime(from), convertDateTime(to)));
return this;
}
private static Double convertDateTime(DateTime dateTime) {
private static Double convertDateTime(ZonedDateTime dateTime) {
if (dateTime == null) {
return null;
} else {
return (double) dateTime.getMillis();
return (double) dateTime.toInstant().toEpochMilli();
}
}
/**
* Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
* Same as {@link #addRange(String, ZonedDateTime, ZonedDateTime)} but the key will be
* automatically generated based on <code>from</code> and <code>to</code>.
*/
public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
public DateRangeAggregationBuilder addRange(ZonedDateTime from, ZonedDateTime to) {
return addRange(null, from, to);
}
@ -253,16 +253,16 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
* @param to
* the upper bound on the dates, exclusive
*/
public DateRangeAggregationBuilder addUnboundedTo(String key, DateTime to) {
public DateRangeAggregationBuilder addUnboundedTo(String key, ZonedDateTime to) {
addRange(new RangeAggregator.Range(key, null, convertDateTime(to)));
return this;
}
/**
* Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
* Same as {@link #addUnboundedTo(String, ZonedDateTime)} but the key will be
* computed automatically.
*/
public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
public DateRangeAggregationBuilder addUnboundedTo(ZonedDateTime to) {
return addUnboundedTo(null, to);
}
@ -274,16 +274,16 @@ public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeA
* @param from
* the lower bound on the distances, inclusive
*/
public DateRangeAggregationBuilder addUnboundedFrom(String key, DateTime from) {
public DateRangeAggregationBuilder addUnboundedFrom(String key, ZonedDateTime from) {
addRange(new RangeAggregator.Range(key, convertDateTime(from), null));
return this;
}
/**
* Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
* Same as {@link #addUnboundedFrom(String, ZonedDateTime)} but the key will be
* computed automatically.
*/
public DateRangeAggregationBuilder addUnboundedFrom(DateTime from) {
public DateRangeAggregationBuilder addUnboundedFrom(ZonedDateTime from) {
return addUnboundedFrom(null, from);
}

View File

@ -24,10 +24,10 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.List;
import java.util.Map;
@ -48,12 +48,14 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket, I
@Override
public Object getFrom() {
return Double.isInfinite(((Number) from).doubleValue()) ? null : new DateTime(((Number) from).longValue(), DateTimeZone.UTC);
return Double.isInfinite(((Number) from).doubleValue()) ? null :
Instant.ofEpochMilli(((Number) from).longValue()).atZone(ZoneOffset.UTC);
}
@Override
public Object getTo() {
return Double.isInfinite(((Number) to).doubleValue()) ? null : new DateTime(((Number) to).longValue(), DateTimeZone.UTC);
return Double.isInfinite(((Number) to).doubleValue()) ? null :
Instant.ofEpochMilli(((Number) to).longValue()).atZone(ZoneOffset.UTC);
}
private Double internalGetFrom() {

View File

@ -21,10 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
public class ParsedDateRange extends ParsedRange {
@ -59,11 +60,11 @@ public class ParsedDateRange extends ParsedRange {
return doubleAsDateTime(to);
}
private static DateTime doubleAsDateTime(Double d) {
private static ZonedDateTime doubleAsDateTime(Double d) {
if (d == null || Double.isInfinite(d)) {
return null;
}
return new DateTime(d.longValue(), DateTimeZone.UTC);
return Instant.ofEpochMilli(d.longValue()).atZone(ZoneOffset.UTC);
}
static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {

View File

@ -21,9 +21,9 @@ package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -34,7 +34,6 @@ import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
@ -139,9 +138,9 @@ public class DerivativePipelineAggregationBuilder extends AbstractPipelineAggreg
}
Long xAxisUnits = null;
if (units != null) {
DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
Rounding.DateTimeUnit dateTimeUnit = DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(units);
if (dateTimeUnit != null) {
xAxisUnits = dateTimeUnit.field(DateTimeZone.UTC).getDurationField().getUnitMillis();
xAxisUnits = dateTimeUnit.getField().getBaseUnit().getDuration().toMillis();
} else {
TimeValue timeValue = TimeValue.parseTimeValue(units, null, getClass().getSimpleName() + ".unit");
if (timeValue != null) {

View File

@ -19,19 +19,22 @@
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Objects;
import java.util.function.BiFunction;
@ -39,7 +42,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
private String fieldName;
private Object missing;
private Script script;
private DateTimeZone timeZone;
private ZoneId timeZone;
private static final String NAME = "field_config";
@ -62,16 +65,16 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
if (timezoneAware) {
parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return DateTimeZone.forID(p.text());
return ZoneId.of(p.text());
} else {
return DateTimeZone.forOffsetHours(p.intValue());
return ZoneOffset.ofHours(p.intValue());
}
}, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
}
return parser;
};
private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, DateTimeZone timeZone) {
private MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone) {
this.fieldName = fieldName;
this.missing = missing;
this.script = script;
@ -82,7 +85,11 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
this.fieldName = in.readString();
this.missing = in.readGenericValue();
this.script = in.readOptionalWriteable(Script::new);
this.timeZone = in.readOptionalTimeZone();
if (in.getVersion().before(Version.V_7_0_0)) {
this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
} else {
this.timeZone = in.readOptionalZoneId();
}
}
public Object getMissing() {
@ -93,7 +100,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
return script;
}
public DateTimeZone getTimeZone() {
public ZoneId getTimeZone() {
return timeZone;
}
@ -106,7 +113,11 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
out.writeString(fieldName);
out.writeGenericValue(missing);
out.writeOptionalWriteable(script);
out.writeOptionalTimeZone(timeZone);
if (out.getVersion().before(Version.V_7_0_0)) {
out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
} else {
out.writeOptionalZoneId(timeZone);
}
}
@Override
@ -122,7 +133,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName);
}
if (timeZone != null) {
builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getID());
builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getId());
}
builder.endObject();
return builder;
@ -153,7 +164,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
private String fieldName;
private Object missing = null;
private Script script = null;
private DateTimeZone timeZone = null;
private ZoneId timeZone = null;
public String getFieldName() {
return fieldName;
@ -182,11 +193,11 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject
return this;
}
public DateTimeZone getTimeZone() {
public ZoneId getTimeZone() {
return timeZone;
}
public Builder setTimeZone(DateTimeZone timeZone) {
public Builder setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}

View File

@ -28,9 +28,9 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.DocValueFormat;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneOffset;
public enum ValueType implements Writeable {
@ -42,7 +42,7 @@ public enum ValueType implements Writeable {
DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),
DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class,
new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, DateTimeZone.UTC)),
new DocValueFormat.DateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ZoneOffset.UTC)),
IP((byte) 6, "ip", "ip", ValuesSourceType.BYTES, IndexFieldData.class, DocValueFormat.IP),
// TODO: what is the difference between "number" and "numeric"?
NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW),

View File

@ -18,8 +18,10 @@
*/
package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
@ -28,9 +30,9 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.internal.SearchContext;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Map;
import java.util.Objects;
@ -81,7 +83,7 @@ public abstract class ValuesSourceAggregationBuilder<VS extends ValuesSource, AB
private ValueType valueType = null;
private String format = null;
private Object missing = null;
private DateTimeZone timeZone = null;
private ZoneId timeZone = null;
protected ValuesSourceConfig<VS> config;
protected ValuesSourceAggregationBuilder(String name, ValuesSourceType valuesSourceType, ValueType targetValueType) {
@ -144,8 +146,10 @@ public abstract class ValuesSourceAggregationBuilder<VS extends ValuesSource, AB
}
format = in.readOptionalString();
missing = in.readGenericValue();
if (in.readBoolean()) {
timeZone = DateTimeZone.forID(in.readString());
if (in.getVersion().before(Version.V_7_0_0)) {
timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone());
} else {
timeZone = in.readOptionalZoneId();
}
}
@ -167,10 +171,10 @@ public abstract class ValuesSourceAggregationBuilder<VS extends ValuesSource, AB
}
out.writeOptionalString(format);
out.writeGenericValue(missing);
boolean hasTimeZone = timeZone != null;
out.writeBoolean(hasTimeZone);
if (hasTimeZone) {
out.writeString(timeZone.getID());
if (out.getVersion().before(Version.V_7_0_0)) {
out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone));
} else {
out.writeOptionalZoneId(timeZone);
}
innerWriteTo(out);
}
@ -289,7 +293,7 @@ public abstract class ValuesSourceAggregationBuilder<VS extends ValuesSource, AB
* Sets the time zone to use for this aggregation
*/
@SuppressWarnings("unchecked")
public AB timeZone(DateTimeZone timeZone) {
public AB timeZone(ZoneId timeZone) {
if (timeZone == null) {
throw new IllegalArgumentException("[timeZone] must not be null: [" + name + "]");
}
@ -300,7 +304,7 @@ public abstract class ValuesSourceAggregationBuilder<VS extends ValuesSource, AB
/**
* Gets the time zone to use for this aggregation
*/
public DateTimeZone timeZone() {
public ZoneId timeZone() {
return timeZone;
}

View File

@ -32,7 +32,9 @@ import org.elasticsearch.script.AggregationScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.joda.time.DateTimeZone;
import java.time.ZoneId;
import java.time.ZoneOffset;
/**
* A configuration that tells aggregations how to retrieve data from the index
@ -48,7 +50,7 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
ValueType valueType,
String field, Script script,
Object missing,
DateTimeZone timeZone,
ZoneId timeZone,
String format) {
if (field == null) {
@ -121,7 +123,7 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
}
}
private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable DateTimeZone tz) {
private static DocValueFormat resolveFormat(@Nullable String format, @Nullable ValueType valueType, @Nullable ZoneId tz) {
if (valueType == null) {
return DocValueFormat.RAW; // we can't figure it out
}
@ -130,7 +132,7 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
valueFormat = new DocValueFormat.Decimal(format);
}
if (valueFormat instanceof DocValueFormat.DateTime && format != null) {
valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : DateTimeZone.UTC);
valueFormat = new DocValueFormat.DateTime(DateFormatter.forPattern(format), tz != null ? tz : ZoneOffset.UTC);
}
return valueFormat;
}
@ -142,7 +144,7 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
private boolean unmapped = false;
private DocValueFormat format = DocValueFormat.RAW;
private Object missing;
private DateTimeZone timeZone;
private ZoneId timeZone;
public ValuesSourceConfig(ValuesSourceType valueSourceType) {
this.valueSourceType = valueSourceType;
@ -206,12 +208,12 @@ public class ValuesSourceConfig<VS extends ValuesSource> {
return this.missing;
}
public ValuesSourceConfig<VS> timezone(final DateTimeZone timeZone) {
this.timeZone= timeZone;
public ValuesSourceConfig<VS> timezone(final ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}
public DateTimeZone timezone() {
public ZoneId timezone() {
return this.timeZone;
}

View File

@ -25,7 +25,9 @@ import org.elasticsearch.common.xcontent.AbstractObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.joda.time.DateTimeZone;
import java.time.ZoneId;
import java.time.ZoneOffset;
public final class ValuesSourceParserHelper {
@ -91,9 +93,9 @@ public final class ValuesSourceParserHelper {
if (timezoneAware) {
objectParser.declareField(ValuesSourceAggregationBuilder::timeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return DateTimeZone.forID(p.text());
return ZoneId.of(p.text());
} else {
return DateTimeZone.forOffsetHours(p.intValue());
return ZoneOffset.ofHours(p.intValue());
}
}, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG);
}

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
@ -52,7 +51,7 @@ public final class SnapshotInfo implements Comparable<SnapshotInfo>, ToXContent,
public static final String CONTEXT_MODE_PARAM = "context_mode";
public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT";
private static final DateFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime");
private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strictDateOptionalTime");
private static final String SNAPSHOT = "snapshot";
private static final String UUID = "uuid";
private static final String INDICES = "indices";

View File

@ -25,7 +25,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@ -276,7 +276,7 @@ public class RolloverIT extends ESIntegTestCase {
public void testRolloverWithDateMath() {
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear());
String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
String index = "test-" + DateFormatter.forPattern("YYYY.MM.dd").format(now) + "-1";
String dateMathExp = "<test-{now/d}-1>";
assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
ensureGreen(index);
@ -290,14 +290,14 @@ public class RolloverIT extends ESIntegTestCase {
ensureGreen(index);
RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo(index));
assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.getOldIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
@ -310,8 +310,8 @@ public class RolloverIT extends ESIntegTestCase {
IndexMetaData.SETTING_INDEX_PROVIDED_NAME));
response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("<test-{now/d}-000004>").get();
assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-000004"));
assertThat(response.getOldIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.getNewIndex(), equalTo("test-" + DateFormatter.forPattern("YYYY.MM.dd").format(now) + "-000004"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));

View File

@ -93,25 +93,25 @@ public class DateMathExpressionResolverTests extends ESTestCase {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
public void testExpression_CustomFormat() throws Exception {
List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd}}>"));
List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>"));
assertThat(results.size(), equalTo(1));
assertThat(results.get(0),
equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
}
public void testExpression_EscapeStatic() throws Exception {
List<String> result = expressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>"));
assertThat(result.size(), equalTo(1));
assertThat(result.get(0),
equalTo(".mar{v}el-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
equalTo(".mar{v}el-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(new DateTime(context.getStartTime(), UTC))));
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37037")
public void testExpression_EscapeDateFormat() throws Exception {
List<String> result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'YYYY}}>"));
List<String> result = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>"));
assertThat(result.size(), equalTo(1));
assertThat(result.get(0),
equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'YYYY").print(new DateTime(context.getStartTime(), UTC))));
equalTo(".marvel-" + DateTimeFormat.forPattern("'{year}'yyyy").print(new DateTime(context.getStartTime(), UTC))));
}
public void testExpression_MixedArray() throws Exception {
@ -150,10 +150,10 @@ public class DateMathExpressionResolverTests extends ESTestCase {
now = DateTime.now(UTC).withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0);
}
Context context = new Context(this.context.getState(), this.context.getOptions(), now.getMillis());
List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{YYYY.MM.dd|" + timeZone.getID() + "}}>"));
List<String> results = expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getID() + "}}>"));
assertThat(results.size(), equalTo(1));
logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0));
assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now.withZone(timeZone))));
assertThat(results.get(0), equalTo(".marvel-" + DateTimeFormat.forPattern("yyyy.MM.dd").print(now.withZone(timeZone))));
}
public void testExpressionInvalidUnescaped() throws Exception {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.common;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
@ -317,7 +318,7 @@ public class RoundingTests extends ESTestCase {
}
/**
* randomized test on {@link org.elasticsearch.common.rounding.Rounding.TimeIntervalRounding} with random interval and time zone offsets
* randomized test on {@link org.elasticsearch.common.Rounding.TimeIntervalRounding} with random interval and time zone offsets
*/
public void testIntervalRoundingRandom() {
for (int i = 0; i < 1000; i++) {
@ -728,7 +729,7 @@ public class RoundingTests extends ESTestCase {
}
private static long time(String time, ZoneId zone) {
TemporalAccessor accessor = DateFormatters.forPattern("date_optional_time").withZone(zone).parse(time);
TemporalAccessor accessor = DateFormatter.forPattern("date_optional_time").withZone(zone).parse(time);
return DateFormatters.toZonedDateTime(accessor).toInstant().toEpochMilli();
}

View File

@ -29,13 +29,11 @@ import org.joda.time.DateTimeZone;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
public class JavaJodaTimeDuellingTests extends ESTestCase {
@ -64,11 +62,22 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
formatter3.parse("20181126T121212.123-0830");
}
public void testCustomTimeFormats() {
assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
assertSameDate("12/06", "dd/MM");
assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
}
// this test requires tests to run with -Djava.locale.providers=COMPAT in order to work
// public void testCustomTimeFormats() {
// assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss");
// assertSameDate("12/06", "dd/MM");
// assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z");
//
// // also ensure that locale based dates are the same
// assertSameDate("Di., 05 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
// assertSameDate("Mi., 06 Dez. 2000 02:55:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
// assertSameDate("Do., 07 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
// assertSameDate("Fr., 08 Dez. 2000 00:00:00 -0800", "E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"));
//
// DateTime dateTimeNow = DateTime.now(DateTimeZone.UTC);
// ZonedDateTime javaTimeNow = Instant.ofEpochMilli(dateTimeNow.getMillis()).atZone(ZoneOffset.UTC);
// assertSamePrinterOutput("E, d MMM yyyy HH:mm:ss Z", LocaleUtils.parse("de"), javaTimeNow, dateTimeNow);
// }
public void testDuellingFormatsValidParsing() {
assertSameDate("1522332219", "epoch_second");
@ -133,10 +142,6 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis");
assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction");
assertSameDate("10000", "date_optional_time");
assertSameDate("10000T", "date_optional_time");
assertSameDate("2018", "date_optional_time");
assertSameDate("2018T", "date_optional_time");
assertSameDate("2018-05", "date_optional_time");
assertSameDate("2018-05-30", "date_optional_time");
assertSameDate("2018-05-30T20", "date_optional_time");
@ -278,7 +283,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
// joda comes up with a different exception message here, so we have to adapt
assertJodaParseException("2012-W1-8", "week_date",
"Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]");
assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed");
assertJavaTimeParseException("2012-W1-8", "week_date");
assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time");
assertSameDate("2012-W48-6T10:15:30.123+0100", "week_date_time");
@ -358,6 +363,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertParseException("2018-12-1", "strict_date_optional_time");
assertParseException("2018-1-31", "strict_date_optional_time");
assertParseException("10000-01-31", "strict_date_optional_time");
assertSameDate("2010-01-05T02:00", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30Z", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30+0100", "strict_date_optional_time");
@ -365,6 +371,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertParseException("2018-12-31T10:15:3", "strict_date_optional_time");
assertParseException("2018-12-31T10:5:30", "strict_date_optional_time");
assertParseException("2018-12-31T9:15:30", "strict_date_optional_time");
assertSameDate("2015-01-04T00:00Z", "strict_date_optional_time");
assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time");
assertSameDate("2018-12-31T10:15:30.123+0100", "strict_date_time");
assertSameDate("2018-12-31T10:15:30.123+01:00", "strict_date_time");
@ -456,7 +463,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
// joda comes up with a different exception message here, so we have to adapt
assertJodaParseException("2012-W01-8", "strict_week_date",
"Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]");
assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed");
assertJavaTimeParseException("2012-W01-8", "strict_week_date");
assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time");
assertSameDate("2012-W48-6T10:15:30.123+0100", "strict_week_date_time");
@ -585,19 +592,55 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertSamePrinterOutput("strictYear", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate);
assertSamePrinterOutput("strict_date_optional_time", javaDate, jodaDate);
assertSamePrinterOutput("epoch_millis", javaDate, jodaDate);
}
public void testSamePrinterOutputWithTimeZone() {
String format = "strict_date_optional_time";
String dateInput = "2017-02-01T08:02:00.000-01:00";
DateFormatter javaFormatter = DateFormatter.forPattern(format);
TemporalAccessor javaDate = javaFormatter.parse(dateInput);
DateFormatter jodaFormatter = Joda.forPattern(format);
DateTime dateTime = jodaFormatter.parseJoda(dateInput);
String javaDateString = javaFormatter.withZone(ZoneOffset.ofHours(-1)).format(javaDate);
String jodaDateString = jodaFormatter.withZone(ZoneOffset.ofHours(-1)).formatJoda(dateTime);
String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]",
format, jodaDateString, javaDateString);
assertThat(message, javaDateString, is(jodaDateString));
}
public void testDateFormatterWithLocale() {
Locale locale = randomLocale(random());
String pattern = randomBoolean() ? "strict_date_optional_time||date_time" : "date_time||strict_date_optional_time";
DateFormatter formatter = DateFormatter.forPattern(pattern).withLocale(locale);
assertThat(formatter.pattern(), is(pattern));
assertThat(formatter.locale(), is(locale));
}
public void testSeveralTimeFormats() {
DateFormatter jodaFormatter = DateFormatter.forPattern("year_month_day||ordinal_date");
DateFormatter javaFormatter = DateFormatter.forPattern("8year_month_day||ordinal_date");
assertSameDate("2018-12-12", "year_month_day||ordinal_date", jodaFormatter, javaFormatter);
assertSameDate("2018-128", "year_month_day||ordinal_date", jodaFormatter, javaFormatter);
{
String format = "year_month_day||ordinal_date";
DateFormatter jodaFormatter = Joda.forPattern(format);
DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertSameDate("2018-12-12", format, jodaFormatter, javaFormatter);
assertSameDate("2018-128", format, jodaFormatter, javaFormatter);
}
{
String format = "strictDateOptionalTime||dd-MM-yyyy";
DateFormatter jodaFormatter = Joda.forPattern(format);
DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertSameDate("31-01-2014", format, jodaFormatter, javaFormatter);
}
}
private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli()));
String javaTimeOut = DateFormatters.forPattern(format).format(javaDate);
String jodaTimeOut = DateFormatter.forPattern(format).formatJoda(jodaDate);
String javaTimeOut = DateFormatter.forPattern(format).format(javaDate);
String jodaTimeOut = Joda.forPattern(format).formatJoda(jodaDate);
if (JavaVersion.current().getVersion().get(0) == 8 && javaTimeOut.endsWith(".0")
&& (format.equals("epoch_second") || format.equals("epoch_millis"))) {
// java 8 has a bug in DateTimeFormatter usage when printing dates that rely on isSupportedBy for fields, which is
@ -611,7 +654,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
private void assertSameDate(String input, String format) {
DateFormatter jodaFormatter = Joda.forPattern(format);
DateFormatter javaFormatter = DateFormatters.forPattern(format);
DateFormatter javaFormatter = DateFormatter.forPattern(format);
assertSameDate(input, format, jodaFormatter, javaFormatter);
}
@ -629,7 +672,7 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
private void assertParseException(String input, String format) {
assertJodaParseException(input, format, "Invalid format: \"" + input);
assertJavaTimeParseException(input, format, "Text '" + input + "' could not be parsed");
assertJavaTimeParseException(input, format);
}
private void assertJodaParseException(String input, String format, String expectedMessage) {
@ -638,9 +681,10 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertThat(e.getMessage(), containsString(expectedMessage));
}
private void assertJavaTimeParseException(String input, String format, String expectedMessage) {
DateFormatter javaTimeFormatter = DateFormatters.forPattern(format);
DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input));
assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage));
private void assertJavaTimeParseException(String input, String format) {
DateFormatter javaTimeFormatter = DateFormatter.forPattern(format);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> javaTimeFormatter.parse(input));
assertThat(e.getMessage(), containsString(input));
assertThat(e.getMessage(), containsString(format));
}
}

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTimeZone;
import java.time.Instant;
import java.time.ZoneId;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.LongSupplier;
@ -35,7 +36,7 @@ import static org.hamcrest.Matchers.equalTo;
public class JodaDateMathParserTests extends ESTestCase {
DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime||epoch_millis");
DateFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis");
DateMathParser parser = formatter.toDateMathParser();
void assertDateMathEquals(String toTest, String expected) {
@ -43,12 +44,12 @@ public class JodaDateMathParserTests extends ESTestCase {
}
void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, DateTimeZone timeZone) {
long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli();
assertDateEquals(gotMillis, toTest, expected);
}
void assertDateEquals(long gotMillis, String original, String expected) {
long expectedMillis = parser.parse(expected, () -> 0);
long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
if (gotMillis != expectedMillis) {
fail("Date math not equal\n" +
"Original : " + original + "\n" +
@ -147,7 +148,7 @@ public class JodaDateMathParserTests extends ESTestCase {
public void testNow() {
final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null);
final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli();
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
@ -164,10 +165,10 @@ public class JodaDateMathParserTests extends ESTestCase {
DateMathParser parser = formatter.toDateMathParser();
assertEquals(
this.formatter.parseMillis("1970-01-01T04:52:20.000Z"),
parser.parse("04:52:20", () -> 0, false, (ZoneId) null));
parser.parse("04:52:20", () -> 0, false, (ZoneId) null).toEpochMilli());
assertEquals(
this.formatter.parseMillis("1970-01-01T04:52:20.999Z"),
parser.parse("04:52:20", () -> 0, true, (ZoneId) null));
parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli());
}
// Implicit rounding happening when parts of the date are not specified
@ -185,9 +186,9 @@ public class JodaDateMathParserTests extends ESTestCase {
assertDateMathEquals("2014-11-18T09:20", "2014-11-18T08:20:59.999Z", 0, true, DateTimeZone.forID("CET"));
// implicit rounding with explicit timezone in the date format
DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-ddZ");
DateFormatter formatter = Joda.forPattern("yyyy-MM-ddZ");
DateMathParser parser = formatter.toDateMathParser();
long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
@ -261,7 +262,7 @@ public class JodaDateMathParserTests extends ESTestCase {
// also check other time units
JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second"));
long datetime = parser.parse("1418248078", () -> 0);
long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year

View File

@ -26,15 +26,18 @@ import org.joda.time.DateTimeZone;
import java.time.ZoneOffset;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
public class JodaTests extends ESTestCase {
public void testBasicTTimePattern() {
DateFormatter formatter1 = DateFormatter.forPattern("basic_t_time");
DateFormatter formatter1 = Joda.forPattern("basic_t_time");
assertEquals(formatter1.pattern(), "basic_t_time");
assertEquals(formatter1.zone(), ZoneOffset.UTC);
DateFormatter formatter2 = DateFormatter.forPattern("basicTTime");
DateFormatter formatter2 = Joda.forPattern("basicTTime");
assertEquals(formatter2.pattern(), "basicTTime");
assertEquals(formatter2.zone(), ZoneOffset.UTC);
@ -42,9 +45,25 @@ public class JodaTests extends ESTestCase {
assertEquals("T102030.040Z", formatter1.formatJoda(dt));
assertEquals("T102030.040Z", formatter1.formatJoda(dt));
expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_t_Time"));
expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_T_Time"));
expectThrows(IllegalArgumentException.class, () -> DateFormatter.forPattern("basic_T_time"));
expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_t_Time"));
expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_T_Time"));
expectThrows(IllegalArgumentException.class, () -> Joda.forPattern("basic_T_time"));
}
public void testEqualsAndHashcode() {
String format = randomFrom("yyyy/MM/dd HH:mm:ss", "basic_t_time");
JodaDateFormatter first = Joda.forPattern(format);
JodaDateFormatter second = Joda.forPattern(format);
JodaDateFormatter third = Joda.forPattern(" HH:mm:ss, yyyy/MM/dd");
assertThat(first, is(second));
assertThat(second, is(first));
assertThat(first, is(not(third)));
assertThat(second, is(not(third)));
assertThat(first.hashCode(), is(second.hashCode()));
assertThat(second.hashCode(), is(first.hashCode()));
assertThat(first.hashCode(), is(not(third.hashCode())));
assertThat(second.hashCode(), is(not(third.hashCode())));
}
}

View File

@ -1,800 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.joda;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.ISODateTimeFormat;
import java.util.Date;
import java.util.Locale;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class SimpleJodaTests extends ESTestCase {
public void testMultiParsers() {
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
DateTimeParser[] parsers = new DateTimeParser[3];
parsers[0] = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getParser();
parsers[1] = DateTimeFormat.forPattern("MM-dd-yyyy").withZone(DateTimeZone.UTC).getParser();
parsers[2] = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZone(DateTimeZone.UTC).getParser();
builder.append(DateTimeFormat.forPattern("MM/dd/yyyy").withZone(DateTimeZone.UTC).getPrinter(), parsers);
DateTimeFormatter formatter = builder.toFormatter();
formatter.parseMillis("2009-11-15 14:12:12");
}
public void testIsoDateFormatDateTimeNoMillisUTC() {
DateTimeFormatter formatter = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC);
long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
assertThat(millis, equalTo(0L));
}
public void testUpperBound() {
MutableDateTime dateTime = new MutableDateTime(3000, 12, 31, 23, 59, 59, 999, DateTimeZone.UTC);
DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
String value = "2000-01-01";
int i = formatter.parseInto(dateTime, value, 0);
assertThat(i, equalTo(value.length()));
assertThat(dateTime.toString(), equalTo("2000-01-01T23:59:59.999Z"));
}
public void testIsoDateFormatDateOptionalTimeUTC() {
DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
long millis = formatter.parseMillis("1970-01-01T00:00:00Z");
assertThat(millis, equalTo(0L));
millis = formatter.parseMillis("1970-01-01T00:00:00.001Z");
assertThat(millis, equalTo(1L));
millis = formatter.parseMillis("1970-01-01T00:00:00.1Z");
assertThat(millis, equalTo(100L));
millis = formatter.parseMillis("1970-01-01T00:00:00.1");
assertThat(millis, equalTo(100L));
millis = formatter.parseMillis("1970-01-01T00:00:00");
assertThat(millis, equalTo(0L));
millis = formatter.parseMillis("1970-01-01");
assertThat(millis, equalTo(0L));
millis = formatter.parseMillis("1970");
assertThat(millis, equalTo(0L));
try {
formatter.parseMillis("1970 kuku");
fail("formatting should fail");
} catch (IllegalArgumentException e) {
// all is well
}
// test offset in format
millis = formatter.parseMillis("1970-01-01T00:00:00-02:00");
assertThat(millis, equalTo(TimeValue.timeValueHours(2).millis()));
}
public void testIsoVsCustom() {
DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
long millis = formatter.parseMillis("1970-01-01T00:00:00");
assertThat(millis, equalTo(0L));
formatter = DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZone(DateTimeZone.UTC);
millis = formatter.parseMillis("1970/01/01 00:00:00");
assertThat(millis, equalTo(0L));
DateFormatter formatter2 = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss");
millis = formatter2.parseMillis("1970/01/01 00:00:00");
assertThat(millis, equalTo(0L));
}
public void testWriteAndParse() {
DateTimeFormatter dateTimeWriter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC);
DateTimeFormatter formatter = ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC);
Date date = new Date();
assertThat(formatter.parseMillis(dateTimeWriter.print(date.getTime())), equalTo(date.getTime()));
}
public void testSlashInFormat() {
DateFormatter formatter = DateFormatter.forPattern("MM/yyyy");
formatter.parseMillis("01/2001");
DateFormatter formatter2 = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss");
long millis = formatter2.parseMillis("1970/01/01 00:00:00");
formatter2.formatMillis(millis);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
formatter2.parseMillis("1970/01/01"));
}
public void testMultipleFormats() {
DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
long millis = formatter.parseMillis("1970/01/01 00:00:00");
assertThat("1970/01/01 00:00:00", is(formatter.formatMillis(millis)));
}
public void testMultipleDifferentFormats() {
DateFormatter formatter = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
String input = "1970/01/01 00:00:00";
long millis = formatter.parseMillis(input);
assertThat(input, is(formatter.formatMillis(millis)));
DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||dateOptionalTime");
DateFormatter.forPattern("dateOptionalTime||yyyy/MM/dd HH:mm:ss||yyyy/MM/dd");
DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||dateOptionalTime||yyyy/MM/dd");
DateFormatter.forPattern("date_time||date_time_no_millis");
DateFormatter.forPattern(" date_time || date_time_no_millis");
}
public void testInvalidPatterns() {
expectInvalidPattern("does_not_exist_pattern", "Invalid format: [does_not_exist_pattern]: Illegal pattern component: o");
expectInvalidPattern("OOOOO", "Invalid format: [OOOOO]: Illegal pattern component: OOOOO");
expectInvalidPattern(null, "No date pattern provided");
expectInvalidPattern("", "No date pattern provided");
expectInvalidPattern(" ", "No date pattern provided");
expectInvalidPattern("||date_time_no_millis", "No date pattern provided");
expectInvalidPattern("date_time_no_millis||", "No date pattern provided");
}
private void expectInvalidPattern(String pattern, String errorMessage) {
try {
DateFormatter.forPattern(pattern);
fail("Pattern " + pattern + " should have thrown an exception but did not");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString(errorMessage));
}
}
public void testRounding() {
long TIME = utcTimeInMillis("2009-02-03T01:01:01");
MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
time.setMillis(TIME);
assertThat(time.monthOfYear().roundFloor().toString(), equalTo("2009-02-01T00:00:00.000Z"));
time.setMillis(TIME);
assertThat(time.hourOfDay().roundFloor().toString(), equalTo("2009-02-03T01:00:00.000Z"));
time.setMillis(TIME);
assertThat(time.dayOfMonth().roundFloor().toString(), equalTo("2009-02-03T00:00:00.000Z"));
}
// Verifies that rounding configured via MutableDateTime.setRounding floors every
// instant that is subsequently set on the object.
public void testRoundingSetOnTime() {
    // month rounding: any instant within a month floors to the first of that month
    MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
    time.setRounding(time.getChronology().monthOfYear(), MutableDateTime.ROUND_FLOOR);
    time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
    assertThat(time.toString(), equalTo("2009-02-01T00:00:00.000Z"));
    assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-01T00:00:00.000Z")));

    time.setMillis(utcTimeInMillis("2009-05-03T01:01:01"));
    assertThat(time.toString(), equalTo("2009-05-01T00:00:00.000Z"));
    assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-05-01T00:00:00.000Z")));

    // day rounding: floors to midnight of the same day
    time = new MutableDateTime(DateTimeZone.UTC);
    time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);
    time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
    assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000Z"));
    assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-03T00:00:00.000Z")));

    time.setMillis(utcTimeInMillis("2009-02-02T23:01:01"));
    assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000Z"));
    assertThat(time.getMillis(), equalTo(utcTimeInMillis("2009-02-02T00:00:00.000Z")));

    // week rounding: 2011-05-05 floors to 2011-05-02, the start of its ISO week
    time = new MutableDateTime(DateTimeZone.UTC);
    time.setRounding(time.getChronology().weekOfWeekyear(), MutableDateTime.ROUND_FLOOR);
    time.setMillis(utcTimeInMillis("2011-05-05T01:01:01"));
    assertThat(time.toString(), equalTo("2011-05-02T00:00:00.000Z"));
    assertThat(time.getMillis(), equalTo(utcTimeInMillis("2011-05-02T00:00:00.000Z")));
}
// Verifies that floor rounding honours the time zone set on the MutableDateTime:
// the same instant floors to different local midnights in UTC-2 versus UTC.
public void testRoundingWithTimeZone() {
    MutableDateTime time = new MutableDateTime(DateTimeZone.UTC);
    time.setZone(DateTimeZone.forOffsetHours(-2));
    time.setRounding(time.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);

    MutableDateTime utcTime = new MutableDateTime(DateTimeZone.UTC);
    utcTime.setRounding(utcTime.getChronology().dayOfMonth(), MutableDateTime.ROUND_FLOOR);

    time.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));
    utcTime.setMillis(utcTimeInMillis("2009-02-03T01:01:01"));

    // 01:01 UTC is 23:01 of the previous day at UTC-2, so the -2 zone floors to the 2nd
    assertThat(time.toString(), equalTo("2009-02-02T00:00:00.000-02:00"));
    assertThat(utcTime.toString(), equalTo("2009-02-03T00:00:00.000Z"));
    // the time is on the 2nd, and utcTime is on the 3rd, but, because time already encapsulates
    // time zone, the millis diff is not 24, but 22 hours
    assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));

    time.setMillis(utcTimeInMillis("2009-02-04T01:01:01"));
    utcTime.setMillis(utcTimeInMillis("2009-02-04T01:01:01"));
    assertThat(time.toString(), equalTo("2009-02-03T00:00:00.000-02:00"));
    assertThat(utcTime.toString(), equalTo("2009-02-04T00:00:00.000Z"));
    assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));
}
// Verifies that epoch_millis/epoch_second parse a positive epoch into the expected
// UTC date and that a fractional suffix on the input is truncated.
public void testThatEpochsCanBeParsed() {
    boolean parseMilliSeconds = randomBoolean();

    // epoch: 1433144433655 => date: Mon Jun 1 09:40:33.655 CEST 2015
    DateFormatter formatter = DateFormatter.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second");
    DateTime dateTime = formatter.parseJoda(parseMilliSeconds ? "1433144433655" : "1433144433");

    assertThat(dateTime.getYear(), is(2015));
    assertThat(dateTime.getDayOfMonth(), is(1));
    assertThat(dateTime.getMonthOfYear(), is(6));
    assertThat(dateTime.getHourOfDay(), is(7)); // utc timezone, +2 offset due to CEST
    assertThat(dateTime.getMinuteOfHour(), is(40));
    assertThat(dateTime.getSecondOfMinute(), is(33));

    // only the millis parser can resolve sub-second precision
    if (parseMilliSeconds) {
        assertThat(dateTime.getMillisOfSecond(), is(655));
    } else {
        assertThat(dateTime.getMillisOfSecond(), is(0));
    }

    // test floats get truncated
    String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L),
        randomNonNegativeLong());
    assertThat(formatter.parseJoda(epochFloatValue).getMillis(), is(dateTime.getMillis()));
}
// Verifies that negative epochs of arbitrary length parse to the expected pre-1970
// date and that their use emits a deprecation warning.
public void testThatNegativeEpochsCanBeParsed() {
    // problem: negative epochs can be arbitrary in size...
    boolean parseMilliSeconds = randomBoolean();
    DateFormatter formatter = DateFormatter.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second");
    DateTime dateTime = formatter.parseJoda("-10000");

    // -10000 is interpreted as millis or seconds before the epoch depending on the format
    assertThat(dateTime.getYear(), is(1969));
    assertThat(dateTime.getMonthOfYear(), is(12));
    assertThat(dateTime.getDayOfMonth(), is(31));
    if (parseMilliSeconds) {
        assertThat(dateTime.getHourOfDay(), is(23)); // utc timezone, +2 offset due to CEST
        assertThat(dateTime.getMinuteOfHour(), is(59));
        assertThat(dateTime.getSecondOfMinute(), is(50));
    } else {
        assertThat(dateTime.getHourOfDay(), is(21)); // utc timezone, +2 offset due to CEST
        assertThat(dateTime.getMinuteOfHour(), is(13));
        assertThat(dateTime.getSecondOfMinute(), is(20));
    }

    // test floats get truncated
    String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L),
        randomNonNegativeLong());
    assertThat(formatter.parseJoda(epochFloatValue).getMillis(), is(dateTime.getMillis()));

    // every negative epoch must be parsed, no matter if exact the size or bigger
    if (parseMilliSeconds) {
        formatter.parseJoda("-100000000");
        formatter.parseJoda("-999999999999");
        formatter.parseJoda("-1234567890123");
        formatter.parseJoda("-1234567890123456789");

        formatter.parseJoda("-1234567890123.9999");
        formatter.parseJoda("-1234567890123456789.9999");
    } else {
        formatter.parseJoda("-100000000");
        formatter.parseJoda("-1234567890");
        formatter.parseJoda("-1234567890123456");

        formatter.parseJoda("-1234567890.9999");
        formatter.parseJoda("-1234567890123456.9999");
    }

    assertWarnings("Use of negative values" +
        " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
}
public void testForInvalidDatesInEpochSecond() {
    // non-numeric input and numbers longer than a valid epoch-second value must be rejected
    String invalidInput = randomFrom("invalid date", "12345678901234567", "12345678901234567890");
    DateFormatter epochSecondFormatter = DateFormatter.forPattern("epoch_second");
    IllegalArgumentException exception =
        expectThrows(IllegalArgumentException.class, () -> epochSecondFormatter.parseJoda(invalidInput));
    assertThat(exception.getMessage(), containsString("Invalid format"));
}
public void testForInvalidDatesInEpochMillis() {
    // non-numeric input and numbers longer than a valid epoch-millis value must be rejected
    String invalidInput = randomFrom("invalid date", "12345678901234567890");
    DateFormatter epochMillisFormatter = DateFormatter.forPattern("epoch_millis");
    IllegalArgumentException exception =
        expectThrows(IllegalArgumentException.class, () -> epochMillisFormatter.parseJoda(invalidInput));
    assertThat(exception.getMessage(), containsString("Invalid format"));
}
public void testForInvalidTimeZoneWithEpochSeconds() {
    // an epoch-second formatter configured with a non-UTC zone must refuse to parse
    DateTimeFormatter jodaFormatter = new DateTimeFormatterBuilder()
        .append(new Joda.EpochTimeParser(false))
        .toFormatter()
        .withZone(DateTimeZone.forOffsetHours(1))
        .withLocale(Locale.ROOT);
    DateFormatter epochSecondFormatter = new JodaDateFormatter("epoch_seconds", jodaFormatter, jodaFormatter);
    IllegalArgumentException exception =
        expectThrows(IllegalArgumentException.class, () -> epochSecondFormatter.parseJoda("1433144433655"));
    assertThat(exception.getMessage(), containsString("time_zone must be UTC"));
}
public void testForInvalidTimeZoneWithEpochMillis() {
    // an epoch-millis formatter configured with a non-UTC zone must refuse to parse
    DateTimeFormatter jodaFormatter = new DateTimeFormatterBuilder()
        .append(new Joda.EpochTimeParser(true))
        .toFormatter()
        .withZone(DateTimeZone.forOffsetHours(1))
        .withLocale(Locale.ROOT);
    DateFormatter epochMillisFormatter = new JodaDateFormatter("epoch_millis", jodaFormatter, jodaFormatter);
    IllegalArgumentException exception =
        expectThrows(IllegalArgumentException.class, () -> epochMillisFormatter.parseJoda("1433144433"));
    assertThat(exception.getMessage(), containsString("time_zone must be UTC"));
}
public void testThatEpochParserIsPrinter() {
    // both epoch formats must expose a parser and printer that can print
    for (String pattern : new String[] { "epoch_millis", "epoch_second" }) {
        JodaDateFormatter formatter = Joda.forPattern(pattern);
        assertThat(formatter.parser.isPrinter(), is(true));
        assertThat(formatter.printer.isPrinter(), is(true));
    }
}
// Verifies that Joda.EpochTimePrinter prints the current time as 10 characters in
// seconds mode and 13 characters in millis mode, ending in the expected digits.
public void testThatEpochTimePrinterWorks() {
    StringBuffer buffer = new StringBuffer();
    LocalDateTime now = LocalDateTime.now();

    // seconds mode (constructor arg false)
    Joda.EpochTimePrinter epochTimePrinter = new Joda.EpochTimePrinter(false);
    epochTimePrinter.printTo(buffer, now, Locale.ROOT);
    assertThat(buffer.length(), is(10));
    // only check the last digit, as seconds go from 0-99 in the unix timestamp and don't stop at 60
    assertThat(buffer.toString(), endsWith(String.valueOf(now.getSecondOfMinute() % 10)));

    // millis mode (constructor arg true)
    buffer = new StringBuffer();
    Joda.EpochTimePrinter epochMilliSecondTimePrinter = new Joda.EpochTimePrinter(true);
    epochMilliSecondTimePrinter.printTo(buffer, now, Locale.ROOT);
    assertThat(buffer.length(), is(13));
    assertThat(buffer.toString(), endsWith(String.valueOf(now.getMillisOfSecond())));
}
// Verifies that the epoch formatters carry no hidden state between parse calls:
// repeated parses keep returning the correct millis value.
public void testThatEpochParserIsIdempotent() {
    DateFormatter formatter = DateFormatter.forPattern("epoch_millis");
    DateTime dateTime = formatter.parseJoda("1234567890123");
    assertThat(dateTime.getMillis(), is(1234567890123L));
    dateTime = formatter.parseJoda("1234567890456");
    assertThat(dateTime.getMillis(), is(1234567890456L));
    dateTime = formatter.parseJoda("1234567890789");
    assertThat(dateTime.getMillis(), is(1234567890789L));
    dateTime = formatter.parseJoda("1234567890123456789");
    assertThat(dateTime.getMillis(), is(1234567890123456789L));

    // the seconds formatter deliberately parses the very same input several times in a row
    DateFormatter secondsFormatter = DateFormatter.forPattern("epoch_second");
    DateTime secondsDateTime = secondsFormatter.parseJoda("1234567890");
    assertThat(secondsDateTime.getMillis(), is(1234567890000L));
    secondsDateTime = secondsFormatter.parseJoda("1234567890");
    assertThat(secondsDateTime.getMillis(), is(1234567890000L));
    secondsDateTime = secondsFormatter.parseJoda("1234567890");
    assertThat(secondsDateTime.getMillis(), is(1234567890000L));
    secondsDateTime = secondsFormatter.parseJoda("1234567890123456");
    assertThat(secondsDateTime.getMillis(), is(1234567890123456000L));
}
/**
 * Verifies that the built-in named formats are lenient about field widths (they accept
 * fewer digits than the canonical form and print back the zero-padded value), while
 * their {@code strict}-prefixed counterparts reject any input that does not use the
 * full field width.
 */
public void testThatDefaultFormatterChecksForCorrectYearLength() throws Exception {
    // if no strict version is tested, this means the date format is already strict by itself
    // yyyyMMdd
    assertValidDateFormatParsing("basicDate", "20140303");
    assertDateFormatParsingThrowingException("basicDate", "2010303");

    // yyyyMMddT'HHmmss.SSSZ
    assertValidDateFormatParsing("basicDateTime", "20140303T124343.123Z");
    assertValidDateFormatParsing("basicDateTime", "00050303T124343.123Z");
    assertDateFormatParsingThrowingException("basicDateTime", "50303T124343.123Z");

    // yyyyMMddT'HHmmssZ
    assertValidDateFormatParsing("basicDateTimeNoMillis", "20140303T124343Z");
    assertValidDateFormatParsing("basicDateTimeNoMillis", "00050303T124343Z");
    assertDateFormatParsingThrowingException("basicDateTimeNoMillis", "50303T124343Z");

    // yyyyDDD
    assertValidDateFormatParsing("basicOrdinalDate", "0005165");
    assertDateFormatParsingThrowingException("basicOrdinalDate", "5165");

    // yyyyDDDT'HHmmss.SSSZ
    assertValidDateFormatParsing("basicOrdinalDateTime", "0005165T124343.123Z");
    assertValidDateFormatParsing("basicOrdinalDateTime", "0005165T124343.123Z");
    assertDateFormatParsingThrowingException("basicOrdinalDateTime", "5165T124343.123Z");

    // yyyyDDDT'HHmmssZ
    assertValidDateFormatParsing("basicOrdinalDateTimeNoMillis", "0005165T124343Z");
    assertValidDateFormatParsing("basicOrdinalDateTimeNoMillis", "0005165T124343Z");
    assertDateFormatParsingThrowingException("basicOrdinalDateTimeNoMillis", "5165T124343Z");

    // HHmmss.SSSZ
    assertValidDateFormatParsing("basicTime", "090909.123Z");
    assertDateFormatParsingThrowingException("basicTime", "90909.123Z");

    // HHmmssZ
    assertValidDateFormatParsing("basicTimeNoMillis", "090909Z");
    assertDateFormatParsingThrowingException("basicTimeNoMillis", "90909Z");

    // 'THHmmss.SSSZ
    assertValidDateFormatParsing("basicTTime", "T090909.123Z");
    assertDateFormatParsingThrowingException("basicTTime", "T90909.123Z");

    // THHmmssZ
    assertValidDateFormatParsing("basicTTimeNoMillis", "T090909Z");
    assertDateFormatParsingThrowingException("basicTTimeNoMillis", "T90909Z");

    // xxxxW'wwe
    assertValidDateFormatParsing("basicWeekDate", "0005W414");
    assertValidDateFormatParsing("basicWeekDate", "5W414", "0005W414");
    assertDateFormatParsingThrowingException("basicWeekDate", "5W14");

    assertValidDateFormatParsing("strictBasicWeekDate", "0005W414");
    assertDateFormatParsingThrowingException("strictBasicWeekDate", "0005W47");
    assertDateFormatParsingThrowingException("strictBasicWeekDate", "5W414");
    assertDateFormatParsingThrowingException("strictBasicWeekDate", "5W14");

    // xxxxW'wweT'HHmmss.SSSZ
    assertValidDateFormatParsing("basicWeekDateTime", "0005W414T124343.123Z");
    assertValidDateFormatParsing("basicWeekDateTime", "5W414T124343.123Z", "0005W414T124343.123Z");
    assertDateFormatParsingThrowingException("basicWeekDateTime", "5W14T124343.123Z");

    assertValidDateFormatParsing("strictBasicWeekDateTime", "0005W414T124343.123Z");
    assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "0005W47T124343.123Z");
    assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "5W414T124343.123Z");
    assertDateFormatParsingThrowingException("strictBasicWeekDateTime", "5W14T124343.123Z");

    // xxxxW'wweT'HHmmssZ
    assertValidDateFormatParsing("basicWeekDateTimeNoMillis", "0005W414T124343Z");
    assertValidDateFormatParsing("basicWeekDateTimeNoMillis", "5W414T124343Z", "0005W414T124343Z");
    assertDateFormatParsingThrowingException("basicWeekDateTimeNoMillis", "5W14T124343Z");

    assertValidDateFormatParsing("strictBasicWeekDateTimeNoMillis", "0005W414T124343Z");
    assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "0005W47T124343Z");
    assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "5W414T124343Z");
    assertDateFormatParsingThrowingException("strictBasicWeekDateTimeNoMillis", "5W14T124343Z");

    // yyyy-MM-dd
    assertValidDateFormatParsing("date", "0005-06-03");
    assertValidDateFormatParsing("date", "5-6-3", "0005-06-03");

    assertValidDateFormatParsing("strictDate", "0005-06-03");
    assertDateFormatParsingThrowingException("strictDate", "5-6-3");
    assertDateFormatParsingThrowingException("strictDate", "0005-06-3");
    assertDateFormatParsingThrowingException("strictDate", "0005-6-03");
    assertDateFormatParsingThrowingException("strictDate", "5-06-03");

    // yyyy-MM-dd'T'HH
    assertValidDateFormatParsing("dateHour", "0005-06-03T12");
    assertValidDateFormatParsing("dateHour", "5-6-3T1", "0005-06-03T01");

    assertValidDateFormatParsing("strictDateHour", "0005-06-03T12");
    assertDateFormatParsingThrowingException("strictDateHour", "5-6-3T1");

    // yyyy-MM-dd'T'HH:mm
    assertValidDateFormatParsing("dateHourMinute", "0005-06-03T12:12");
    assertValidDateFormatParsing("dateHourMinute", "5-6-3T12:1", "0005-06-03T12:01");

    assertValidDateFormatParsing("strictDateHourMinute", "0005-06-03T12:12");
    assertDateFormatParsingThrowingException("strictDateHourMinute", "5-6-3T12:1");

    // yyyy-MM-dd'T'HH:mm:ss
    assertValidDateFormatParsing("dateHourMinuteSecond", "0005-06-03T12:12:12");
    assertValidDateFormatParsing("dateHourMinuteSecond", "5-6-3T12:12:1", "0005-06-03T12:12:01");

    assertValidDateFormatParsing("strictDateHourMinuteSecond", "0005-06-03T12:12:12");
    assertDateFormatParsingThrowingException("strictDateHourMinuteSecond", "5-6-3T12:12:1");

    // yyyy-MM-ddT'HH:mm:ss.SSS
    assertValidDateFormatParsing("dateHourMinuteSecondFraction", "0005-06-03T12:12:12.123");
    assertValidDateFormatParsing("dateHourMinuteSecondFraction", "5-6-3T12:12:1.123", "0005-06-03T12:12:01.123");
    assertValidDateFormatParsing("dateHourMinuteSecondFraction", "5-6-3T12:12:1.1", "0005-06-03T12:12:01.100");

    assertValidDateFormatParsing("strictDateHourMinuteSecondFraction", "0005-06-03T12:12:12.123");
    assertDateFormatParsingThrowingException("strictDateHourMinuteSecondFraction", "5-6-3T12:12:12.1");
    assertDateFormatParsingThrowingException("strictDateHourMinuteSecondFraction", "5-6-3T12:12:12.12");

    assertValidDateFormatParsing("dateHourMinuteSecondMillis", "0005-06-03T12:12:12.123");
    assertValidDateFormatParsing("dateHourMinuteSecondMillis", "5-6-3T12:12:1.123", "0005-06-03T12:12:01.123");
    assertValidDateFormatParsing("dateHourMinuteSecondMillis", "5-6-3T12:12:1.1", "0005-06-03T12:12:01.100");

    assertValidDateFormatParsing("strictDateHourMinuteSecondMillis", "0005-06-03T12:12:12.123");
    assertDateFormatParsingThrowingException("strictDateHourMinuteSecondMillis", "5-6-3T12:12:12.1");
    assertDateFormatParsingThrowingException("strictDateHourMinuteSecondMillis", "5-6-3T12:12:12.12");

    // yyyy-MM-dd'T'HH:mm:ss.SSSZ
    assertValidDateFormatParsing("dateOptionalTime", "2014-03-03", "2014-03-03T00:00:00.000Z");
    assertValidDateFormatParsing("dateOptionalTime", "1257-3-03", "1257-03-03T00:00:00.000Z");
    assertValidDateFormatParsing("dateOptionalTime", "0005-03-3", "0005-03-03T00:00:00.000Z");
    assertValidDateFormatParsing("dateOptionalTime", "5-03-03", "0005-03-03T00:00:00.000Z");
    assertValidDateFormatParsing("dateOptionalTime", "5-03-03T1:1:1.1", "0005-03-03T01:01:01.100Z");

    assertValidDateFormatParsing("strictDateOptionalTime", "2014-03-03", "2014-03-03T00:00:00.000Z");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "0005-3-03");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "0005-03-3");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T1:1:1.1");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:01:01.1");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:01:1.100");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T01:1:01.100");
    assertDateFormatParsingThrowingException("strictDateOptionalTime", "5-03-03T1:01:01.100");

    // yyyy-MM-ddT'HH:mm:ss.SSSZZ
    assertValidDateFormatParsing("dateTime", "5-03-03T1:1:1.1Z", "0005-03-03T01:01:01.100Z");

    assertValidDateFormatParsing("strictDateTime", "2014-03-03T11:11:11.100Z", "2014-03-03T11:11:11.100Z");
    assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T1:1:1.1Z");
    assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T01:01:1.100Z");
    assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T01:1:01.100Z");
    assertDateFormatParsingThrowingException("strictDateTime", "0005-03-03T1:01:01.100Z");

    // yyyy-MM-ddT'HH:mm:ssZZ
    assertValidDateFormatParsing("dateTimeNoMillis", "5-03-03T1:1:1Z", "0005-03-03T01:01:01Z");

    assertValidDateFormatParsing("strictDateTimeNoMillis", "2014-03-03T11:11:11Z", "2014-03-03T11:11:11Z");
    assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T1:1:1Z");
    assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T01:01:1Z");
    assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T01:1:01Z");
    assertDateFormatParsingThrowingException("strictDateTimeNoMillis", "0005-03-03T1:01:01Z");

    // HH
    assertValidDateFormatParsing("hour", "12");
    assertValidDateFormatParsing("hour", "1", "01");

    assertValidDateFormatParsing("strictHour", "12");
    assertValidDateFormatParsing("strictHour", "01");
    assertDateFormatParsingThrowingException("strictHour", "1");

    // HH:mm
    assertValidDateFormatParsing("hourMinute", "12:12");
    assertValidDateFormatParsing("hourMinute", "12:1", "12:01");

    assertValidDateFormatParsing("strictHourMinute", "12:12");
    assertValidDateFormatParsing("strictHourMinute", "12:01");
    assertDateFormatParsingThrowingException("strictHourMinute", "12:1");

    // HH:mm:ss
    assertValidDateFormatParsing("hourMinuteSecond", "12:12:12");
    assertValidDateFormatParsing("hourMinuteSecond", "12:12:1", "12:12:01");

    assertValidDateFormatParsing("strictHourMinuteSecond", "12:12:12");
    assertValidDateFormatParsing("strictHourMinuteSecond", "12:12:01");
    assertDateFormatParsingThrowingException("strictHourMinuteSecond", "12:12:1");

    // HH:mm:ss.SSS
    assertValidDateFormatParsing("hourMinuteSecondFraction", "12:12:12.123");
    assertValidDateFormatParsing("hourMinuteSecondFraction", "12:12:12.1", "12:12:12.100");

    assertValidDateFormatParsing("strictHourMinuteSecondFraction", "12:12:12.123");
    assertValidDateFormatParsing("strictHourMinuteSecondFraction", "12:12:12.1", "12:12:12.100");

    assertValidDateFormatParsing("hourMinuteSecondMillis", "12:12:12.123");
    assertValidDateFormatParsing("hourMinuteSecondMillis", "12:12:12.1", "12:12:12.100");

    assertValidDateFormatParsing("strictHourMinuteSecondMillis", "12:12:12.123");
    assertValidDateFormatParsing("strictHourMinuteSecondMillis", "12:12:12.1", "12:12:12.100");

    // yyyy-DDD
    assertValidDateFormatParsing("ordinalDate", "5-3", "0005-003");

    assertValidDateFormatParsing("strictOrdinalDate", "0005-003");
    assertDateFormatParsingThrowingException("strictOrdinalDate", "5-3");
    assertDateFormatParsingThrowingException("strictOrdinalDate", "0005-3");
    assertDateFormatParsingThrowingException("strictOrdinalDate", "5-003");

    // yyyy-DDDT'HH:mm:ss.SSSZZ
    assertValidDateFormatParsing("ordinalDateTime", "5-3T12:12:12.100Z", "0005-003T12:12:12.100Z");

    assertValidDateFormatParsing("strictOrdinalDateTime", "0005-003T12:12:12.100Z");
    assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T1:12:12.123Z");
    assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T12:1:12.123Z");
    assertDateFormatParsingThrowingException("strictOrdinalDateTime", "5-3T12:12:1.123Z");

    // yyyy-DDDT'HH:mm:ssZZ
    assertValidDateFormatParsing("ordinalDateTimeNoMillis", "5-3T12:12:12Z", "0005-003T12:12:12Z");

    assertValidDateFormatParsing("strictOrdinalDateTimeNoMillis", "0005-003T12:12:12Z");
    assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T1:12:12Z");
    assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T12:1:12Z");
    assertDateFormatParsingThrowingException("strictOrdinalDateTimeNoMillis", "5-3T12:12:1Z");

    // HH:mm:ss.SSSZZ
    assertValidDateFormatParsing("time", "12:12:12.100Z");
    assertValidDateFormatParsing("time", "01:01:01.1Z", "01:01:01.100Z");
    assertValidDateFormatParsing("time", "1:1:1.1Z", "01:01:01.100Z");

    assertValidDateFormatParsing("strictTime", "12:12:12.100Z");
    assertDateFormatParsingThrowingException("strictTime", "12:12:1.100Z");
    assertDateFormatParsingThrowingException("strictTime", "12:1:12.100Z");
    assertDateFormatParsingThrowingException("strictTime", "1:12:12.100Z");

    // HH:mm:ssZZ
    assertValidDateFormatParsing("timeNoMillis", "12:12:12Z");
    assertValidDateFormatParsing("timeNoMillis", "01:01:01Z", "01:01:01Z");
    assertValidDateFormatParsing("timeNoMillis", "1:1:1Z", "01:01:01Z");

    assertValidDateFormatParsing("strictTimeNoMillis", "12:12:12Z");
    assertDateFormatParsingThrowingException("strictTimeNoMillis", "12:12:1Z");
    assertDateFormatParsingThrowingException("strictTimeNoMillis", "12:1:12Z");
    assertDateFormatParsingThrowingException("strictTimeNoMillis", "1:12:12Z");

    // 'THH:mm:ss.SSSZZ
    assertValidDateFormatParsing("tTime", "T12:12:12.100Z");
    assertValidDateFormatParsing("tTime", "T01:01:01.1Z", "T01:01:01.100Z");
    assertValidDateFormatParsing("tTime", "T1:1:1.1Z", "T01:01:01.100Z");

    assertValidDateFormatParsing("strictTTime", "T12:12:12.100Z");
    assertDateFormatParsingThrowingException("strictTTime", "T12:12:1.100Z");
    assertDateFormatParsingThrowingException("strictTTime", "T12:1:12.100Z");
    assertDateFormatParsingThrowingException("strictTTime", "T1:12:12.100Z");

    // 'THH:mm:ssZZ
    assertValidDateFormatParsing("tTimeNoMillis", "T12:12:12Z");
    assertValidDateFormatParsing("tTimeNoMillis", "T01:01:01Z", "T01:01:01Z");
    assertValidDateFormatParsing("tTimeNoMillis", "T1:1:1Z", "T01:01:01Z");

    assertValidDateFormatParsing("strictTTimeNoMillis", "T12:12:12Z");
    assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T12:12:1Z");
    assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T12:1:12Z");
    assertDateFormatParsingThrowingException("strictTTimeNoMillis", "T1:12:12Z");

    // xxxx-'Www-e
    assertValidDateFormatParsing("weekDate", "0005-W4-1", "0005-W04-1");

    assertValidDateFormatParsing("strictWeekDate", "0005-W04-1");
    assertDateFormatParsingThrowingException("strictWeekDate", "0005-W4-1");

    // xxxx-'Www-eT'HH:mm:ss.SSSZZ
    assertValidDateFormatParsing("weekDateTime", "0005-W41-4T12:43:43.123Z");
    assertValidDateFormatParsing("weekDateTime", "5-W41-4T12:43:43.123Z", "0005-W41-4T12:43:43.123Z");

    assertValidDateFormatParsing("strictWeekDateTime", "0005-W41-4T12:43:43.123Z");
    assertValidDateFormatParsing("strictWeekDateTime", "0005-W06-4T12:43:43.123Z");
    assertDateFormatParsingThrowingException("strictWeekDateTime", "0005-W4-7T12:43:43.123Z");
    assertDateFormatParsingThrowingException("strictWeekDateTime", "5-W41-4T12:43:43.123Z");
    assertDateFormatParsingThrowingException("strictWeekDateTime", "5-W1-4T12:43:43.123Z");

    // xxxx-'Www-eT'HH:mm:ssZZ
    assertValidDateFormatParsing("weekDateTimeNoMillis", "0005-W41-4T12:43:43Z");
    assertValidDateFormatParsing("weekDateTimeNoMillis", "5-W41-4T12:43:43Z", "0005-W41-4T12:43:43Z");

    assertValidDateFormatParsing("strictWeekDateTimeNoMillis", "0005-W41-4T12:43:43Z");
    assertValidDateFormatParsing("strictWeekDateTimeNoMillis", "0005-W06-4T12:43:43Z");
    assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "0005-W4-7T12:43:43Z");
    assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "5-W41-4T12:43:43Z");
    assertDateFormatParsingThrowingException("strictWeekDateTimeNoMillis", "5-W1-4T12:43:43Z");

    // yyyy
    assertValidDateFormatParsing("weekyear", "2014");
    assertValidDateFormatParsing("weekyear", "5", "0005");
    assertValidDateFormatParsing("weekyear", "0005");

    assertValidDateFormatParsing("strictWeekyear", "2014");
    assertValidDateFormatParsing("strictWeekyear", "0005");
    assertDateFormatParsingThrowingException("strictWeekyear", "5");

    // yyyy-'W'ee
    assertValidDateFormatParsing("weekyearWeek", "2014-W41");
    assertValidDateFormatParsing("weekyearWeek", "2014-W1", "2014-W01");

    assertValidDateFormatParsing("strictWeekyearWeek", "2014-W41");
    assertDateFormatParsingThrowingException("strictWeekyearWeek", "2014-W1");

    // weekyearWeekDay
    assertValidDateFormatParsing("weekyearWeekDay", "2014-W41-1");
    assertValidDateFormatParsing("weekyearWeekDay", "2014-W1-1", "2014-W01-1");

    assertValidDateFormatParsing("strictWeekyearWeekDay", "2014-W41-1");
    assertDateFormatParsingThrowingException("strictWeekyearWeekDay", "2014-W1-1");

    // yyyy
    assertValidDateFormatParsing("year", "2014");
    assertValidDateFormatParsing("year", "5", "0005");

    assertValidDateFormatParsing("strictYear", "2014");
    assertDateFormatParsingThrowingException("strictYear", "5");

    // yyyy-mm
    assertValidDateFormatParsing("yearMonth", "2014-12");
    assertValidDateFormatParsing("yearMonth", "2014-5", "2014-05");

    assertValidDateFormatParsing("strictYearMonth", "2014-12");
    assertDateFormatParsingThrowingException("strictYearMonth", "2014-5");

    // yyyy-mm-dd
    assertValidDateFormatParsing("yearMonthDay", "2014-12-12");
    assertValidDateFormatParsing("yearMonthDay", "2014-05-5", "2014-05-05");

    assertValidDateFormatParsing("strictYearMonthDay", "2014-12-12");
    assertDateFormatParsingThrowingException("strictYearMonthDay", "2014-05-5");
}
/**
 * Verifies that the default dynamic date formatters of the root object mapper are
 * strict: fully zero-padded dates parse, while dates with short field widths are
 * rejected by every default formatter.
 */
public void testThatRootObjectParsingIsStrict() throws Exception {
    String[] datesThatWork = new String[] { "2014/10/10", "2014/10/10 12:12:12", "2014-05-05", "2014-05-05T12:12:12.123Z" };
    String[] datesThatShouldNotWork = new String[]{ "5-05-05", "2014-5-05", "2014-05-5",
            "2014-05-05T1:12:12.123Z", "2014-05-05T12:1:12.123Z", "2014-05-05T12:12:1.123Z",
            "4/10/10", "2014/1/10", "2014/10/1",
            "2014/10/10 1:12:12", "2014/10/10 12:1:12", "2014/10/10 12:12:1"
    };

    // good case: every strict date must be accepted by at least one default formatter
    for (String date : datesThatWork) {
        boolean dateParsingSuccessful = false;
        for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
            try {
                dateTimeFormatter.parseMillis(date);
                dateParsingSuccessful = true;
                break;
            } catch (Exception ignored) {
                // this formatter cannot parse the date, try the next one
            }
        }
        if (dateParsingSuccessful == false) {
            fail("Parsing for date " + date + " in root object mapper failed, but shouldnt");
        }
    }

    // bad case: a lenient date must be rejected by every default formatter
    for (String date : datesThatShouldNotWork) {
        for (DateFormatter dateTimeFormatter : RootObjectMapper.Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
            try {
                dateTimeFormatter.parseMillis(date);
                fail(String.format(Locale.ROOT, "Expected exception when parsing date %s in root mapper", date));
            } catch (Exception ignored) {
                // expected: the strict formatter rejected the lenient date
            }
        }
    }
}
// Verifies that joda pattern letters whose meaning changes (or disappears) under
// java time emit the appropriate deprecation warnings, including when a single
// pattern triggers several of them.
public void testDeprecatedFormatSpecifiers() {
    Joda.forPattern("CC");
    assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" +
        " next major version of Elasticsearch.");
    Joda.forPattern("YYYY");
    assertWarnings("Use of 'Y' (year-of-era) will change to 'y' in the" +
        " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier.");
    Joda.forPattern("xxxx");
    assertWarnings("Use of 'x' (week-based-year) will change" +
        " to 'Y' in the next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier.");

    // multiple deprecations
    Joda.forPattern("CC-YYYY");
    assertWarnings("Use of 'C' (century-of-era) is deprecated and will not be supported in the" +
        " next major version of Elasticsearch.", "Use of 'Y' (year-of-era) will change to 'y' in the" +
        " next major version of Elasticsearch. Prefix your date format with '8' to use the new specifier.");
}
public void testDeprecatedEpochScientificNotation() {
    // scientific notation parses to its expanded integer value but is deprecated for both epoch formats
    for (String pattern : new String[] { "epoch_second", "epoch_millis" }) {
        assertValidDateFormatParsing(pattern, "1.234e5", "123400");
        assertWarnings("Use of scientific notation" +
            " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
    }
}
public void testDeprecatedEpochNegative() {
    // negative epochs still parse unchanged but are deprecated for both epoch formats
    for (String pattern : new String[] { "epoch_second", "epoch_millis" }) {
        assertValidDateFormatParsing(pattern, "-12345", "-12345");
        assertWarnings("Use of negative values" +
            " in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
    }
}
// Asserts that dateToParse round-trips unchanged through the formatter built from pattern.
private void assertValidDateFormatParsing(String pattern, String dateToParse) {
    assertValidDateFormatParsing(pattern, dateToParse, dateToParse);
}
/**
 * Parses {@code dateToParse} with the formatter built from {@code pattern} and asserts
 * that printing the parsed value yields {@code expectedDate}.
 */
private void assertValidDateFormatParsing(String pattern, String dateToParse, String expectedDate) {
    DateFormatter formatter = DateFormatter.forPattern(pattern);
    long parsedMillis = formatter.parseMillis(dateToParse);
    assertThat(formatter.formatMillis(parsedMillis), is(expectedDate));
}
/**
 * Asserts that parsing {@code invalidDate} with the formatter built from {@code pattern}
 * fails with an {@link IllegalArgumentException}. Rewritten from the try/fail/catch idiom
 * to {@code expectThrows} for consistency with the rest of this test class.
 */
private void assertDateFormatParsingThrowingException(String pattern, String invalidDate) {
    DateFormatter formatter = DateFormatter.forPattern(pattern);
    expectThrows(IllegalArgumentException.class, () -> formatter.parseMillis(invalidDate));
}
// Parses the given date with joda's lenient ISO date-optional-time parser in UTC and
// returns the corresponding epoch milliseconds.
private long utcTimeInMillis(String time) {
    return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time);
}
}

View File

@ -19,9 +19,11 @@
package org.elasticsearch.common.rounding;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.time.ZoneOffset;
@ -42,6 +44,7 @@ public class RoundingDuelTests extends ESTestCase {
rounding = org.elasticsearch.common.Rounding.builder(timeValue()).timeZone(ZoneOffset.UTC).build();
}
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(VersionUtils.getPreviousVersion(Version.V_7_0_0));
rounding.writeTo(output);
Rounding roundingJoda = Rounding.Streams.read(output.bytes().streamInput());

View File

@ -25,7 +25,6 @@ import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;
@ -57,13 +56,13 @@ public class DateFormattersTests extends ESTestCase {
}
}
public void testEpochMilliParser() {
public void testInvalidEpochMilliParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_millis");
DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid"));
assertThat(e.getMessage(), containsString("could not be parsed"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("invalid"));
assertThat(e.getMessage(), containsString("failed to parse date field [invalid] with format [epoch_millis]"));
e = expectThrows(DateTimeParseException.class, () -> formatter.parse("123.1234567"));
assertThat(e.getMessage(), containsString("unparsed text found at index 3"));
e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("123.1234567"));
assertThat(e.getMessage(), containsString("failed to parse date field [123.1234567] with format [epoch_millis]"));
}
// this is not in the duelling tests, because the epoch second parser in joda time drops the milliseconds after the comma
@ -72,14 +71,14 @@ public class DateFormattersTests extends ESTestCase {
public void testEpochSecondParser() {
DateFormatter formatter = DateFormatters.forPattern("epoch_second");
DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1"));
assertThat(e.getMessage(), is("Text '1234.1' could not be parsed, unparsed text found at index 4"));
e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234."));
assertThat(e.getMessage(), is("Text '1234.' could not be parsed, unparsed text found at index 4"));
e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc"));
assertThat(e.getMessage(), is("Text 'abc' could not be parsed, unparsed text found at index 0"));
e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc"));
assertThat(e.getMessage(), is("Text '1234.abc' could not be parsed, unparsed text found at index 4"));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("1234.1234567890"));
assertThat(e.getMessage(), is("failed to parse date field [1234.1234567890] with format [epoch_second]"));
e = expectThrows(IllegalArgumentException .class, () -> formatter.parse("1234.123456789013221"));
assertThat(e.getMessage(), containsString("[1234.123456789013221]"));
e = expectThrows(IllegalArgumentException .class, () -> formatter.parse("abc"));
assertThat(e.getMessage(), containsString("[abc]"));
e = expectThrows(IllegalArgumentException .class, () -> formatter.parse("1234.abc"));
assertThat(e.getMessage(), containsString("[1234.abc]"));
}
public void testEpochMilliParsersWithDifferentFormatters() {
@ -139,7 +138,7 @@ public class DateFormattersTests extends ESTestCase {
assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis")));
}
public void testForceJava8() {
public void testSupportBackwardsJava8Format() {
assertThat(DateFormatter.forPattern("8yyyy-MM-dd"), instanceOf(JavaDateFormatter.class));
// named formats too
assertThat(DateFormatter.forPattern("8date_optional_time"), instanceOf(JavaDateFormatter.class));
@ -161,27 +160,29 @@ public class DateFormattersTests extends ESTestCase {
}
public void testRoundupFormatterWithEpochDates() {
assertRoundupFormatter("8epoch_millis", "1234567890", 1234567890L);
assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L);
// also check nanos of the epoch_millis formatter if it is rounded up to the nano second
DateTimeFormatter roundUpFormatter = ((JavaDateFormatter) DateFormatter.forPattern("8epoch_millis")).getRoundupParser();
Instant epochMilliInstant = DateFormatters.toZonedDateTime(roundUpFormatter.parse("1234567890")).toInstant();
assertThat(epochMilliInstant.getLong(ChronoField.NANO_OF_SECOND), is(890_999_999L));
assertRoundupFormatter("8strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L);
assertRoundupFormatter("8strict_date_optional_time||epoch_millis", "1234567890", 1234567890L);
assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L);
assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L);
assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10T12:13:14.123Z", 1539173594123L);
assertRoundupFormatter("strict_date_optional_time||epoch_millis", "1234567890", 1234567890L);
assertRoundupFormatter("strict_date_optional_time||epoch_millis", "2018-10-10", 1539215999999L);
assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "2018-10-10T12:13:14.123", 1539173594123L);
assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_millis", "1234567890", 1234567890L);
assertRoundupFormatter("8epoch_second", "1234567890", 1234567890999L);
assertRoundupFormatter("epoch_second", "1234567890", 1234567890999L);
// also check nanos of the epoch_millis formatter if it is rounded up to the nano second
DateTimeFormatter epochSecondRoundupParser = ((JavaDateFormatter) DateFormatter.forPattern("8epoch_second")).getRoundupParser();
Instant epochSecondInstant = DateFormatters.toZonedDateTime(epochSecondRoundupParser.parse("1234567890")).toInstant();
assertThat(epochSecondInstant.getLong(ChronoField.NANO_OF_SECOND), is(999_999_999L));
assertRoundupFormatter("8strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L);
assertRoundupFormatter("8strict_date_optional_time||epoch_second", "1234567890", 1234567890999L);
assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L);
assertRoundupFormatter("8uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L);
assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10T12:13:14.123Z", 1539173594123L);
assertRoundupFormatter("strict_date_optional_time||epoch_second", "1234567890", 1234567890999L);
assertRoundupFormatter("strict_date_optional_time||epoch_second", "2018-10-10", 1539215999999L);
assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "2018-10-10T12:13:14.123", 1539173594123L);
assertRoundupFormatter("uuuu-MM-dd'T'HH:mm:ss.SSS||epoch_second", "1234567890", 1234567890999L);
}
private void assertRoundupFormatter(String format, String input, long expectedMilliSeconds) {
@ -194,8 +195,8 @@ public class DateFormattersTests extends ESTestCase {
public void testRoundupFormatterZone() {
ZoneId zoneId = randomZone();
String format = randomFrom("8epoch_second", "8epoch_millis", "8strict_date_optional_time", "8uuuu-MM-dd'T'HH:mm:ss.SSS",
"8strict_date_optional_time||date_optional_time");
String format = randomFrom("epoch_second", "epoch_millis", "strict_date_optional_time", "uuuu-MM-dd'T'HH:mm:ss.SSS",
"strict_date_optional_time||date_optional_time");
JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withZone(zoneId);
DateTimeFormatter roundUpFormatter = formatter.getRoundupParser();
assertThat(roundUpFormatter.getZone(), is(zoneId));
@ -204,8 +205,8 @@ public class DateFormattersTests extends ESTestCase {
public void testRoundupFormatterLocale() {
Locale locale = randomLocale(random());
String format = randomFrom("8epoch_second", "8epoch_millis", "8strict_date_optional_time", "8uuuu-MM-dd'T'HH:mm:ss.SSS",
"8strict_date_optional_time||date_optional_time");
String format = randomFrom("epoch_second", "epoch_millis", "strict_date_optional_time", "uuuu-MM-dd'T'HH:mm:ss.SSS",
"strict_date_optional_time||date_optional_time");
JavaDateFormatter formatter = (JavaDateFormatter) DateFormatter.forPattern(format).withLocale(locale);
DateTimeFormatter roundupParser = formatter.getRoundupParser();
assertThat(roundupParser.getLocale(), is(locale));

View File

@ -39,8 +39,6 @@ public class JavaDateMathParserTests extends ESTestCase {
private final DateMathParser parser = formatter.toDateMathParser();
public void testBasicDates() {
assertDateMathEquals("2014", "2014-01-01T00:00:00.000");
assertDateMathEquals("2014-05", "2014-05-01T00:00:00.000");
assertDateMathEquals("2014-05-30", "2014-05-30T00:00:00.000");
assertDateMathEquals("2014-05-30T20", "2014-05-30T20:00:00.000");
assertDateMathEquals("2014-05-30T20:21", "2014-05-30T20:21:00.000");
@ -125,7 +123,7 @@ public class JavaDateMathParserTests extends ESTestCase {
}
public void testNow() {
final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null);
final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null).toEpochMilli();
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
@ -142,11 +140,11 @@ public class JavaDateMathParserTests extends ESTestCase {
DateMathParser parser = formatter.toDateMathParser();
ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20"));
assertThat(zonedDateTime.getYear(), is(1970));
long millisStart = zonedDateTime.toInstant().toEpochMilli();
Instant millisStart = zonedDateTime.toInstant();
assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, (ZoneId) null));
// due to rounding up, we have to add the number of milliseconds here manually
long millisEnd = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")).toInstant().toEpochMilli() + 999;
assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null));
assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null).toEpochMilli());
}
// Implicit rounding happening when parts of the date are not specified
@ -166,9 +164,10 @@ public class JavaDateMathParserTests extends ESTestCase {
// implicit rounding with explicit timezone in the date format
DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX");
DateMathParser parser = formatter.toDateMathParser();
long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
Instant time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time);
time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null);
time = DateFormatter.forPattern("strict_date_optional_time_nanos").toDateMathParser()
.parse("2011-10-09T23:59:59.999+01:00", () -> 0, false, (ZoneId) null);
assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time);
}
@ -176,7 +175,6 @@ public class JavaDateMathParserTests extends ESTestCase {
public void testExplicitRounding() {
assertDateMathEquals("2014-11-18||/y", "2014-01-01", 0, false, null);
assertDateMathEquals("2014-11-18||/y", "2014-12-31T23:59:59.999", 0, true, null);
assertDateMathEquals("2014||/y", "2014-01-01", 0, false, null);
assertDateMathEquals("2014-01-01T00:00:00.001||/y", "2014-12-31T23:59:59.999", 0, true, null);
// rounding should also take into account time zone
assertDateMathEquals("2014-11-18||/y", "2013-12-31T23:00:00.000Z", 0, false, ZoneId.of("CET"));
@ -239,16 +237,16 @@ public class JavaDateMathParserTests extends ESTestCase {
assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000");
// also check other time units
DateMathParser parser = DateFormatter.forPattern("8epoch_second||dateOptionalTime").toDateMathParser();
long datetime = parser.parse("1418248078", () -> 0);
DateMathParser parser = DateFormatter.forPattern("epoch_second||dateOptionalTime").toDateMathParser();
long datetime = parser.parse("1418248078", () -> 0).toEpochMilli();
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year
assertDateMathEquals("9999", "9999-01-01T00:00:00.000");
assertDateMathEquals("9999", "1970-01-01T00:00:09.999Z");
// 10000 is also a year, breaking bwc, used to be a timestamp
assertDateMathEquals("10000", "10000-01-01T00:00:00.000");
assertDateMathEquals("10000", "1970-01-01T00:00:10.000Z");
// but 10000 with T is still a date format
assertDateMathEquals("10000T", "10000-01-01T00:00:00.000");
assertDateMathEquals("10000-01-01T", "10000-01-01T00:00:00.000");
}
void assertParseException(String msg, String date, String exc) {
@ -266,7 +264,7 @@ public class JavaDateMathParserTests extends ESTestCase {
public void testIllegalDateFormat() {
assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "failed to parse date field");
assertParseException("Expected bad date format exception", "123bogus", "Unrecognized chars at the end of [123bogus]");
assertParseException("Expected bad date format exception", "123bogus", "failed to parse date field [123bogus]");
}
public void testOnlyCallsNowIfNecessary() {
@ -286,12 +284,12 @@ public class JavaDateMathParserTests extends ESTestCase {
}
private void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, ZoneId timeZone) {
long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone).toEpochMilli();
assertDateEquals(gotMillis, toTest, expected);
}
private void assertDateEquals(long gotMillis, String original, String expected) {
long expectedMillis = parser.parse(expected, () -> 0);
long expectedMillis = parser.parse(expected, () -> 0).toEpochMilli();
if (gotMillis != expectedMillis) {
ZonedDateTime zonedDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(gotMillis), ZoneOffset.UTC);
fail("Date math not equal\n" +

View File

@ -21,21 +21,23 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.junit.Before;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Collection;
import static org.hamcrest.Matchers.containsString;
@ -173,7 +175,8 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
.endObject()),
XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString("Cannot parse \"2016-03-99\""));
assertThat(e.getCause().getMessage(),
containsString("failed to parse date field [2016-03-99] with format [strict_date_optional_time||epoch_millis]"));
mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "date")
@ -217,36 +220,13 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(1457654400000L, pointField.numericValue().longValue());
}
public void testFloatEpochFormat() throws IOException {
public void testChangeLocale() throws IOException {
assumeTrue("need java 9 for testing ",JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0);
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "date")
.field("format", "epoch_millis").endObject().endObject()
.endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
long epochMillis = randomNonNegativeLong();
String epochFloatValue = epochMillis + "." + randomIntBetween(0, 999);
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", epochFloatValue)
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
assertEquals(epochMillis, pointField.numericValue().longValue());
}
public void testChangeLocale() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject()
.endObject().endObject());
.field("format", "E, d MMM yyyy HH:mm:ss Z")
.field("locale", "de")
.endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
@ -255,7 +235,7 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", 1457654400)
.field("field", "Mi., 06 Dez. 2000 02:55:00 -0800")
.endObject()),
XContentType.JSON));
}
@ -340,12 +320,8 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}
/**
* Test that time zones are correctly parsed by the {@link DateFieldMapper}.
* There is a known bug with Joda 2.9.4 reported in https://github.com/JodaOrg/joda-time/issues/373.
*/
public void testTimeZoneParsing() throws Exception {
final String timeZonePattern = "yyyy-MM-dd" + randomFrom("ZZZ", "[ZZZ]", "'['ZZZ']'");
final String timeZonePattern = "yyyy-MM-dd" + randomFrom("XXX", "[XXX]", "'['XXX']'");
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type")
@ -360,20 +336,22 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString());
final DateTimeZone randomTimeZone = randomBoolean() ? DateTimeZone.forID(randomFrom("UTC", "CET")) : randomDateTimeZone();
final DateTime randomDate = new DateTime(2016, 03, 11, 0, 0, 0, randomTimeZone);
DateFormatter formatter = DateFormatter.forPattern(timeZonePattern);
final ZoneId randomTimeZone = randomBoolean() ? ZoneId.of(randomFrom("UTC", "CET")) : randomZone();
final ZonedDateTime randomDate = ZonedDateTime.of(2016, 3, 11, 0, 0, 0, 0, randomTimeZone);
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", DateTimeFormat.forPattern(timeZonePattern).print(randomDate))
.field("field", formatter.format(randomDate))
.endObject()),
XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length);
assertEquals(randomDate.withZone(DateTimeZone.UTC).getMillis(), fields[0].numericValue().longValue());
long millis = randomDate.withZoneSameInstant(ZoneOffset.UTC).toInstant().toEpochMilli();
assertEquals(millis, fields[0].numericValue().longValue());
}
public void testMergeDate() throws IOException {
@ -429,6 +407,6 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
}
}

View File

@ -33,6 +33,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.IndexSettings;
@ -45,6 +46,7 @@ import org.joda.time.DateTimeZone;
import org.junit.Before;
import java.io.IOException;
import java.time.ZoneOffset;
import java.util.Locale;
public class DateFieldTypeTests extends FieldTypeTestCase {
@ -67,7 +69,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
addModifier(new Modifier("locale", false) {
@Override
public void modify(MappedFieldType ft) {
((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("date_optional_time").withLocale(Locale.CANADA));
((DateFieldType) ft).setDateTimeFormatter(DateFormatter.forPattern("strict_date_optional_time").withLocale(Locale.CANADA));
}
});
nowInMillis = randomNonNegativeLong();
@ -110,8 +112,10 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
public void testIsFieldWithinQuery() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-12").getMillis();
long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2016-04-03").getMillis();
long instant1 =
DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12")).toInstant().toEpochMilli();
long instant2 =
DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2016-04-03")).toInstant().toEpochMilli();
Document doc = new Document();
LongPoint field = new LongPoint("my_date", instant1);
doc.add(field);
@ -138,25 +142,27 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
public void testValueFormat() {
MappedFieldType ft = createDefaultFieldType();
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-12T14:10:55").getMillis();
long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55"))
.toInstant().toEpochMilli();
assertEquals("2015-10-12T14:10:55.000Z",
ft.docValueFormat(null, DateTimeZone.UTC).format(instant));
ft.docValueFormat(null, ZoneOffset.UTC).format(instant));
assertEquals("2015-10-12T15:10:55.000+01:00",
ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant));
ft.docValueFormat(null, ZoneOffset.ofHours(1)).format(instant));
assertEquals("2015",
createDefaultFieldType().docValueFormat("yyyy", DateTimeZone.UTC).format(instant));
createDefaultFieldType().docValueFormat("YYYY", ZoneOffset.UTC).format(instant));
assertEquals(instant,
ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null));
ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null));
assertEquals(instant + 999,
ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null));
assertEquals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda("2015-10-13").getMillis() - 1,
ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null));
ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null));
long i = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-13")).toInstant().toEpochMilli();
assertEquals(i - 1, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null));
}
public void testValueForSearch() {
MappedFieldType ft = createDefaultFieldType();
String date = "2015-10-12T12:09:55.000Z";
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date).getMillis();
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date);
assertEquals(date, ft.valueForDisplay(instant));
}
@ -170,7 +176,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
MappedFieldType ft = createDefaultFieldType();
ft.setName("field");
String date = "2015-10-12T14:10:55";
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date).getMillis();
long instant = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
ft.setIndexOptions(IndexOptions.DOCS);
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant + 999),
@ -193,8 +199,9 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
ft.setName("field");
String date1 = "2015-10-12T14:10:55";
String date2 = "2016-04-28T11:33:52";
long instant1 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date1).getMillis();
long instant2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date2).getMillis() + 999;
long instant1 = DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
long instant2 =
DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() + 999;
ft.setIndexOptions(IndexOptions.DOCS);
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),

View File

@ -42,6 +42,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.time.Instant;
import java.util.Collection;
import java.util.Collections;
@ -455,7 +456,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
.field("my_field3", 44)
.field("my_field4", 45)
.field("my_field5", 46)
.field("my_field6", 47)
.field("my_field6", Instant.now().toEpochMilli())
.field("my_field7", true)
.endObject());
Mapper myField1Mapper = null;

View File

@ -60,8 +60,6 @@ public class DynamicTemplatesTests extends ESSingleNodeTestCase {
assertThat(mapperService.fullName("l"), notNullValue());
assertNotSame(IndexOptions.NONE, mapperService.fullName("l").indexOptions());
}
public void testSimple() throws Exception {

View File

@ -458,7 +458,7 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)));
assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
assertEquals("Invalid format: [[test_format]]: Unknown pattern letter: t", e.getMessage());
}
}

View File

@ -104,11 +104,12 @@ public class RangeFieldQueryStringQueryBuilderTests extends AbstractQueryTestCas
DateMathParser parser = type.dateMathParser;
Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(createShardContext());
Query range = LongRange.newIntersectsQuery(DATE_RANGE_FIELD_NAME,
new long[]{ parser.parse("2010-01-01", () -> 0)}, new long[]{ parser.parse("2018-01-01", () -> 0)});
new long[]{ parser.parse("2010-01-01", () -> 0).toEpochMilli()},
new long[]{ parser.parse("2018-01-01", () -> 0).toEpochMilli()});
Query dv = RangeFieldMapper.RangeType.DATE.dvRangeQuery(DATE_RANGE_FIELD_NAME,
BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
parser.parse("2010-01-01", () -> 0),
parser.parse("2018-01-01", () -> 0), true, true);
parser.parse("2010-01-01", () -> 0).toEpochMilli(),
parser.parse("2018-01-01", () -> 0).toEpochMilli(), true, true);
assertEquals(new IndexOrDocValuesQuery(range, dv), query);
}

View File

@ -48,6 +48,8 @@ import org.junit.Before;
import java.net.InetAddress;
import java.util.Locale;
import static org.hamcrest.Matchers.containsString;
public class RangeFieldTypeTests extends FieldTypeTestCase {
RangeType type;
protected static String FIELDNAME = "field";
@ -111,17 +113,18 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
fieldType.setHasDocValues(false);
ShapeRelation relation = randomFrom(ShapeRelation.values());
// dates will break the default format
// dates will break the default format, month/day of month is turned around in the format
final String from = "2016-15-06T15:29:50+08:00";
final String to = "2016-16-06T15:29:50+08:00";
ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> fieldType.rangeQuery(from, to, true, true, relation, null, null, context));
assertEquals("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]",
ex.getMessage());
assertThat(ex.getMessage(),
containsString("failed to parse date field [2016-15-06T15:29:50+08:00] with format [strict_date_optional_time||epoch_millis]")
);
// setting mapping format which is compatible with those dates
final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm:ssZZ");
final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm:ssZZZZZ");
assertEquals(1465975790000L, formatter.parseMillis(from));
assertEquals(1466062190000L, formatter.parseMillis(to));

View File

@ -64,9 +64,10 @@ import org.elasticsearch.index.search.QueryStringQueryParser;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.time.DateTimeException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@ -177,7 +178,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
queryStringQueryBuilder.minimumShouldMatch(randomMinimumShouldMatch());
}
if (randomBoolean()) {
queryStringQueryBuilder.timeZone(randomDateTimeZone().getID());
queryStringQueryBuilder.timeZone(randomZone().getId());
}
if (randomBoolean()) {
queryStringQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean());
@ -211,7 +212,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
String quoteFieldSuffix = instance.quoteFieldSuffix();
Float tieBreaker = instance.tieBreaker();
String minimumShouldMatch = instance.minimumShouldMatch();
String timeZone = instance.timeZone() == null ? null : instance.timeZone().getID();
String timeZone = instance.timeZone() == null ? null : instance.timeZone().getId();
boolean autoGenerateSynonymsPhraseQuery = instance.autoGenerateSynonymsPhraseQuery();
boolean fuzzyTranspositions = instance.fuzzyTranspositions();
@ -319,12 +320,12 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
break;
case 20:
if (timeZone == null) {
timeZone = randomDateTimeZone().getID();
timeZone = randomZone().getId();
} else {
if (randomBoolean()) {
timeZone = null;
} else {
timeZone = randomValueOtherThan(timeZone, () -> randomDateTimeZone().getID());
timeZone = randomValueOtherThan(timeZone, () -> randomZone().getId());
}
}
break;
@ -848,7 +849,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
QueryBuilder queryBuilder = parseQuery(queryAsString);
assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class));
QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder;
assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris")));
assertThat(queryStringQueryBuilder.timeZone(), equalTo(ZoneId.of("Europe/Paris")));
String invalidQueryAsString = "{\n" +
" \"query_string\":{\n" +
@ -856,7 +857,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
" \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
" }\n" +
"}";
expectThrows(IllegalArgumentException.class, () -> parseQuery(invalidQueryAsString));
expectThrows(DateTimeException.class, () -> parseQuery(invalidQueryAsString));
}
public void testToQueryBooleanQueryMultipleBoosts() throws Exception {

View File

@ -44,10 +44,12 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.HashMap;
import java.util.Map;
@ -72,18 +74,22 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
break;
case 1:
// use mapped date field, using date string representation
Instant now = Instant.now();
ZonedDateTime start = now.minusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC);
ZonedDateTime end = now.plusMillis(randomIntBetween(0, 1000000)).atZone(ZoneOffset.UTC);
query = new RangeQueryBuilder(randomFrom(
DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, DATE_ALIAS_FIELD_NAME));
query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
query.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(start));
query.to(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(end));
// Create timestamp option only then we have a date mapper,
// otherwise we could trigger exception.
if (createShardContext().getMapperService().fullName(DATE_FIELD_NAME) != null) {
if (randomBoolean()) {
query.timeZone(randomDateTimeZone().getID());
query.timeZone(randomZone().getId());
}
if (randomBoolean()) {
query.format("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
String format = "strict_date_optional_time";
query.format(format);
}
}
break;
@ -444,7 +450,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
DateTime queryToValue = new DateTime(2016, 1, 1, 0, 0, 0, ISOChronology.getInstanceUTC());
query.from(queryFromValue);
query.to(queryToValue);
query.timeZone(randomDateTimeZone().getID());
query.timeZone(randomZone().getId());
query.format("yyyy-MM-dd");
QueryShardContext queryShardContext = createShardContext();
QueryBuilder rewritten = query.rewrite(queryShardContext);

View File

@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.cache.request.RequestCacheStats;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@ -34,8 +35,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.joda.time.DateTimeZone;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
@ -68,7 +69,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
// which used to not work well with the query cache because of the handles stream output
// see #9500
final SearchResponse r1 = client.prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH)
.addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00")).minDocCount(0)
.addAggregation(dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0)
.dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r1);
@ -80,7 +81,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
for (int i = 0; i < 10; ++i) {
final SearchResponse r2 = client.prepareSearch("index").setSize(0)
.setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(dateHistogram("histo").field("f")
.timeZone(DateTimeZone.forID("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
.timeZone(ZoneId.of("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
.get();
assertSearchResponse(r2);
Histogram h1 = r1.getAggregations().get("histo");
@ -246,15 +247,16 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
client.prepareIndex("index-2", "type", "4").setSource("d", now.minusDays(3)),
client.prepareIndex("index-2", "type", "5").setSource("d", now.minusDays(4)),
client.prepareIndex("index-2", "type", "6").setSource("d", now.minusDays(5)),
client.prepareIndex("index-3", "type", "7").setSource("d", now.minusDays(6)),
client.prepareIndex("index-3", "type", "8").setSource("d", now.minusDays(7)),
client.prepareIndex("index-3", "type", "9").setSource("d", now.minusDays(8)));
DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time");
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", formatter.format(now)),
client.prepareIndex("index-1", "type", "2").setSource("d", formatter.format(now.minusDays(1))),
client.prepareIndex("index-1", "type", "3").setSource("d", formatter.format(now.minusDays(2))),
client.prepareIndex("index-2", "type", "4").setSource("d", formatter.format(now.minusDays(3))),
client.prepareIndex("index-2", "type", "5").setSource("d", formatter.format(now.minusDays(4))),
client.prepareIndex("index-2", "type", "6").setSource("d", formatter.format(now.minusDays(5))),
client.prepareIndex("index-3", "type", "7").setSource("d", formatter.format(now.minusDays(6))),
client.prepareIndex("index-3", "type", "8").setSource("d", formatter.format(now.minusDays(7))),
client.prepareIndex("index-3", "type", "9").setSource("d", formatter.format(now.minusDays(8))));
ensureSearchable("index-1", "index-2", "index-3");
assertCacheState(client, "index-1", 0, 0);
assertCacheState(client, "index-2", 0, 0);

View File

@ -29,8 +29,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTimeZone;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
@ -60,15 +60,15 @@ public class DocValueFormatTests extends ESTestCase {
assertEquals(DocValueFormat.Decimal.class, vf.getClass());
assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern);
DocValueFormat.DateTime dateFormat =
new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
DateFormatter formatter = DateFormatter.forPattern("epoch_second");
DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(formatter, ZoneOffset.ofHours(1));
out = new BytesStreamOutput();
out.writeNamedWriteable(dateFormat);
in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
vf = in.readNamedWriteable(DocValueFormat.class);
assertEquals(DocValueFormat.DateTime.class, vf.getClass());
assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.pattern());
assertEquals(DateTimeZone.forOffsetHours(1), ((DocValueFormat.DateTime) vf).timeZone);
assertEquals(ZoneOffset.ofHours(1), ((DocValueFormat.DateTime) vf).timeZone);
out = new BytesStreamOutput();
out.writeNamedWriteable(DocValueFormat.GEOHASH);

View File

@ -36,7 +36,7 @@ public class AutoDateHistogramTests extends BaseAggregationTestCase<AutoDateHist
builder.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
builder.timeZone(randomDateTimeZone());
builder.timeZone(randomZone());
}
return builder;
}

View File

@ -22,12 +22,12 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
@ -46,13 +46,14 @@ import org.elasticsearch.search.aggregations.metrics.Avg;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.junit.After;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -83,21 +84,21 @@ import static org.hamcrest.core.IsNull.notNullValue;
@ESIntegTestCase.SuiteScopeTestCase
public class DateHistogramIT extends ESIntegTestCase {
static Map<DateTime, Map<String, Object>> expectedMultiSortBuckets;
static Map<ZonedDateTime, Map<String, Object>> expectedMultiSortBuckets;
private DateTime date(int month, int day) {
return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
private ZonedDateTime date(int month, int day) {
return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
}
private DateTime date(String date) {
return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date);
private ZonedDateTime date(String date) {
return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
private static String format(DateTime date, String pattern) {
return DateTimeFormat.forPattern(pattern).print(date);
private static String format(ZonedDateTime date, String pattern) {
return DateFormatter.forPattern(pattern).format(date);
}
private IndexRequestBuilder indexDoc(String idx, DateTime date, int value) throws Exception {
private IndexRequestBuilder indexDoc(String idx, ZonedDateTime date, int value) throws Exception {
return client().prepareIndex(idx, "type").setSource(jsonBuilder()
.startObject()
.timeField("date", date)
@ -142,7 +143,7 @@ public class DateHistogramIT extends ESIntegTestCase {
ensureSearchable();
}
private void addExpectedBucket(DateTime key, long docCount, double avg, double sum) {
private void addExpectedBucket(ZonedDateTime key, long docCount, double avg, double sum) {
Map<String, Object> bucketProps = new HashMap<>();
bucketProps.put("_count", docCount);
bucketProps.put("avg_l", avg);
@ -196,13 +197,12 @@ public class DateHistogramIT extends ESIntegTestCase {
internalCluster().wipeIndices("idx2");
}
private static String getBucketKeyAsString(DateTime key) {
return getBucketKeyAsString(key, DateTimeZone.UTC);
private static String getBucketKeyAsString(ZonedDateTime key) {
return getBucketKeyAsString(key, ZoneOffset.UTC);
}
private static String getBucketKeyAsString(DateTime key, DateTimeZone tz) {
ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(tz);
return DateFormatter.forPattern(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.pattern()).withZone(zoneId).formatJoda(key);
private static String getBucketKeyAsString(ZonedDateTime key, ZoneId tz) {
return DateFormatter.forPattern("strict_date_optional_time").withZone(tz).format(key);
}
public void testSingleValuedField() throws Exception {
@ -218,35 +218,34 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
public void testSingleValuedFieldWithTimeZone() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("date")
.dateHistogramInterval(DateHistogramInterval.DAY)
.minDocCount(1)
.timeZone(DateTimeZone.forID("+01:00"))).get();
DateTimeZone tz = DateTimeZone.forID("+01:00");
.addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
.timeZone(ZoneId.of("+01:00"))).execute()
.actionGet();
ZoneId tz = ZoneId.of("+01:00");
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@ -255,46 +254,46 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
DateTime key = new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(4);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(5);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
}
@ -304,7 +303,7 @@ public class DateHistogramIT extends ESIntegTestCase {
if (randomBoolean()) {
format = format + "||date_optional_time";
}
DateTimeZone tz = DateTimeZone.forID("+01:00");
ZoneId tz = ZoneId.of("+01:00");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("date")
.dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
@ -318,21 +317,25 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(6));
List<DateTime> expectedKeys = new ArrayList<>();
expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC));
expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC));
List<ZonedDateTime> expectedKeys = new ArrayList<>();
expectedKeys.add(ZonedDateTime.of(2012, 1, 1, 23, 0, 0, 0, ZoneOffset.UTC));
expectedKeys.add(ZonedDateTime.of(2012, 2, 1, 23, 0, 0, 0, ZoneOffset.UTC));
expectedKeys.add(ZonedDateTime.of(2012, 2, 14, 23, 0, 0, 0, ZoneOffset.UTC));
expectedKeys.add(ZonedDateTime.of(2012, 3, 1, 23, 0, 0, 0, ZoneOffset.UTC));
expectedKeys.add(ZonedDateTime.of(2012, 3, 14, 23, 0, 0, 0, ZoneOffset.UTC));
expectedKeys.add(ZonedDateTime.of(2012, 3, 22, 23, 0, 0, 0, ZoneOffset.UTC));
Iterator<DateTime> keyIterator = expectedKeys.iterator();
Iterator<ZonedDateTime> keyIterator = expectedKeys.iterator();
for (Histogram.Bucket bucket : buckets) {
assertThat(bucket, notNullValue());
DateTime expectedKey = keyIterator.next();
assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider)));
assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey));
ZonedDateTime expectedKey = keyIterator.next();
String bucketKey = bucket.getKeyAsString();
String expectedBucketName = Long.toString(expectedKey.toInstant().toEpochMilli() / millisDivider);
if (JavaVersion.current().getVersion().get(0) == 8 && bucket.getKeyAsString().endsWith(".0")) {
expectedBucketName = expectedBucketName + ".0";
}
assertThat(bucketKey, equalTo(expectedBucketName));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(expectedKey));
assertThat(bucket.getDocCount(), equalTo(1L));
}
}
@ -355,7 +358,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 0;
for (Histogram.Bucket bucket : buckets) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@ -377,7 +380,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@ -399,7 +402,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@ -421,7 +424,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@ -444,42 +447,42 @@ public class DateHistogramIT extends ESIntegTestCase {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count");
Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value");
DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(1.0));
assertThat((DateTime) propertiesKeys[0], equalTo(key));
assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key));
assertThat((long) propertiesDocCounts[0], equalTo(1L));
assertThat((double) propertiesCounts[0], equalTo(1.0));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(5.0));
assertThat((DateTime) propertiesKeys[1], equalTo(key));
assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key));
assertThat((long) propertiesDocCounts[1], equalTo(2L));
assertThat((double) propertiesCounts[1], equalTo(5.0));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat(sum.getValue(), equalTo(15.0));
assertThat((DateTime) propertiesKeys[2], equalTo(key));
assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key));
assertThat((long) propertiesDocCounts[2], equalTo(3L));
assertThat((double) propertiesCounts[2], equalTo(15.0));
}
@ -502,7 +505,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 0;
for (Histogram.Bucket bucket : histo.getBuckets()) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i++;
}
}
@ -525,7 +528,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@ -548,7 +551,7 @@ public class DateHistogramIT extends ESIntegTestCase {
int i = 2;
for (Histogram.Bucket bucket : histo.getBuckets()) {
assertThat(((DateTime) bucket.getKey()), equalTo(new DateTime(2012, i + 1, 1, 0, 0, DateTimeZone.UTC)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(ZonedDateTime.of(2012, i + 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)));
i--;
}
}
@ -625,25 +628,25 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -669,32 +672,32 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -763,32 +766,32 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
DateTime key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
key = new DateTime(2012, 5, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 5, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -817,25 +820,25 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -856,32 +859,32 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
key = new DateTime(2012, 4, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -922,25 +925,25 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2012, 1, 1, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 2, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 3, 1, 0, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -971,7 +974,7 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testSingleValueWithTimeZone() throws Exception {
prepareCreate("idx2").addMapping("type", "date", "type=date").get();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
DateTime date = date("2014-03-11T00:00:00+00:00");
ZonedDateTime date = date("2014-03-11T00:00:00+00:00");
for (int i = 0; i < reqs.length; i++) {
reqs[i] = client().prepareIndex("idx2", "type", "" + i)
.setSource(jsonBuilder().startObject().timeField("date", date).endObject());
@ -983,9 +986,9 @@ public class DateHistogramIT extends ESIntegTestCase {
.setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo")
.field("date")
.timeZone(DateTimeZone.forID("-02:00"))
.timeZone(ZoneId.of("-02:00"))
.dateHistogramInterval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd:HH-mm-ssZZ"))
.format("yyyy-MM-dd:HH-mm-ssZZZZZ"))
.get();
assertThat(response.getHits().getTotalHits().value, equalTo(5L));
@ -1010,8 +1013,9 @@ public class DateHistogramIT extends ESIntegTestCase {
// we're testing on days, so the base must be rounded to a day
int interval = randomIntBetween(1, 2); // in days
long intervalMillis = interval * 24 * 60 * 60 * 1000;
DateTime base = new DateTime(DateTimeZone.UTC).dayOfMonth().roundFloorCopy();
DateTime baseKey = new DateTime(intervalMillis * (base.getMillis() / intervalMillis), DateTimeZone.UTC);
ZonedDateTime base = ZonedDateTime.now(ZoneOffset.UTC).withDayOfMonth(1);
ZonedDateTime baseKey = Instant.ofEpochMilli(intervalMillis * (base.toInstant().toEpochMilli() / intervalMillis))
.atZone(ZoneOffset.UTC);
prepareCreate("idx2")
.setSettings(
@ -1028,7 +1032,7 @@ public class DateHistogramIT extends ESIntegTestCase {
} else {
int docCount = randomIntBetween(1, 3);
for (int j = 0; j < docCount; j++) {
DateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
ZonedDateTime date = baseKey.plusDays(i * interval + randomIntBetween(0, interval - 1));
builders.add(indexDoc("idx2", date, j));
}
docCounts[i] = docCount;
@ -1037,19 +1041,19 @@ public class DateHistogramIT extends ESIntegTestCase {
indexRandom(true, builders);
ensureSearchable("idx2");
DateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);
ZonedDateTime lastDataBucketKey = baseKey.plusDays((numOfBuckets - 1) * interval);
// randomizing the number of buckets on the min bound
// (can sometimes fall within the data range, but more frequently will fall before the data range)
int addedBucketsLeft = randomIntBetween(0, numOfBuckets);
DateTime boundsMinKey;
ZonedDateTime boundsMinKey;
if (frequently()) {
boundsMinKey = baseKey.minusDays(addedBucketsLeft * interval);
} else {
boundsMinKey = baseKey.plusDays(addedBucketsLeft * interval);
addedBucketsLeft = 0;
}
DateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));
ZonedDateTime boundsMin = boundsMinKey.plusDays(randomIntBetween(0, interval - 1));
// randomizing the number of buckets on the max bound
// (can sometimes fall within the data range, but more frequently will fall after the data range)
@ -1059,8 +1063,8 @@ public class DateHistogramIT extends ESIntegTestCase {
addedBucketsRight = 0;
boundsMaxKeyDelta = -boundsMaxKeyDelta;
}
DateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
DateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));
ZonedDateTime boundsMaxKey = lastDataBucketKey.plusDays(boundsMaxKeyDelta);
ZonedDateTime boundsMax = boundsMaxKey.plusDays(randomIntBetween(0, interval - 1));
// it could be that the random bounds.min we chose ended up greater than
// bounds.max - this should
@ -1105,11 +1109,11 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(bucketsCount));
DateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
ZonedDateTime key = baseKey.isBefore(boundsMinKey) ? baseKey : boundsMinKey;
for (int i = 0; i < bucketsCount; i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getKeyAsString(), equalTo(format(key, pattern)));
assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i]));
key = key.plusDays(interval);
@ -1126,15 +1130,15 @@ public class DateHistogramIT extends ESIntegTestCase {
.setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1).put("index.number_of_replicas", 0))
.get();
DateMathParser parser = Joda.getStrictStandardDateFormatter().toDateMathParser();
DateMathParser parser = DateFormatter.forPattern("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis").toDateMathParser();
// we pick a random timezone offset of +12/-12 hours and insert two documents
// one at 00:00 in that time zone and one at 12:00
List<IndexRequestBuilder> builders = new ArrayList<>();
int timeZoneHourOffset = randomIntBetween(-12, 12);
DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
ZoneId timezone = ZoneOffset.ofHours(timeZoneHourOffset);
ZonedDateTime timeZoneStartToday = parser.parse("now/d", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
ZonedDateTime timeZoneNoonToday = parser.parse("now/d+12h", System::currentTimeMillis, false, timezone).atZone(ZoneOffset.UTC);
builders.add(indexDoc(index, timeZoneStartToday, 1));
builders.add(indexDoc(index, timeZoneNoonToday, 2));
indexRandom(true, builders);
@ -1145,7 +1149,7 @@ public class DateHistogramIT extends ESIntegTestCase {
response = client()
.prepareSearch(index)
.setQuery(QueryBuilders.rangeQuery("date")
.from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID()))
.from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()))
.addAggregation(
dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1))
.timeZone(timezone).minDocCount(0).extendedBounds(new ExtendedBounds("now/d", "now/d+23h"))
@ -1164,8 +1168,8 @@ public class DateHistogramIT extends ESIntegTestCase {
for (int i = 0; i < buckets.size(); i++) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat("InternalBucket " + i + " had wrong key", (DateTime) bucket.getKey(),
equalTo(new DateTime(timeZoneStartToday.getMillis() + (i * 60 * 60 * 1000), DateTimeZone.UTC)));
ZonedDateTime zonedDateTime = timeZoneStartToday.plus(i * 60 * 60 * 1000, ChronoUnit.MILLIS);
assertThat("InternalBucket " + i + " had wrong key", (ZonedDateTime) bucket.getKey(), equalTo(zonedDateTime));
if (i == 0 || i == 12) {
assertThat(bucket.getDocCount(), equalTo(1L));
} else {
@ -1186,10 +1190,11 @@ public class DateHistogramIT extends ESIntegTestCase {
.get();
List<IndexRequestBuilder> builders = new ArrayList<>();
builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 1));
builders.add(indexDoc(index, DateTime.parse("2016-01-03T08:00:00.000Z"), 2));
builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 3));
builders.add(indexDoc(index, DateTime.parse("2016-01-06T08:00:00.000Z"), 4));
DateFormatter formatter = DateFormatter.forPattern("date_optional_time");
builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 1));
builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-03T08:00:00.000Z")), 2));
builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 3));
builders.add(indexDoc(index, DateFormatters.toZonedDateTime(formatter.parse("2016-01-06T08:00:00.000Z")), 4));
indexRandom(true, builders);
ensureSearchable(index);
@ -1233,7 +1238,7 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception {
String mappingJson = Strings.toString(jsonBuilder().startObject()
.startObject("type").startObject("properties")
.startObject("date").field("type", "date").field("format", "dateOptionalTime||dd-MM-yyyy")
.startObject("date").field("type", "date").field("format", "strict_date_optional_time||dd-MM-yyyy")
.endObject().endObject().endObject().endObject());
prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get();
IndexRequestBuilder[] reqs = new IndexRequestBuilder[5];
@ -1256,23 +1261,23 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(1));
DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2014, 3, 10, 0, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(5L));
}
public void testIssue6965() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("+01:00"))
.dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0))
.addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00"))
.dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0))
.get();
assertSearchResponse(response);
DateTimeZone tz = DateTimeZone.forID("+01:00");
ZoneId tz = ZoneId.of("+01:00");
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
@ -1280,25 +1285,25 @@ public class DateHistogramIT extends ESIntegTestCase {
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(3));
DateTime key = new DateTime(2011, 12, 31, 23, 0, DateTimeZone.UTC);
ZonedDateTime key = ZonedDateTime.of(2011, 12, 31, 23, 0, 0, 0, ZoneOffset.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1L));
key = new DateTime(2012, 1, 31, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 1, 31, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2L));
key = new DateTime(2012, 2, 29, 23, 0, DateTimeZone.UTC);
key = ZonedDateTime.of(2012, 2, 29, 23, 0, 0, 0, ZoneOffset.UTC);
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(getBucketKeyAsString(key, tz)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3L));
}
@ -1309,7 +1314,7 @@ public class DateHistogramIT extends ESIntegTestCase {
ensureSearchable("test9491");
SearchResponse response = client().prepareSearch("test9491")
.addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.YEAR)
.timeZone(DateTimeZone.forID("Asia/Jerusalem")))
.timeZone(ZoneId.of("Asia/Jerusalem")).format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX"))
.get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@ -1327,8 +1332,8 @@ public class DateHistogramIT extends ESIntegTestCase {
ensureSearchable("test8209");
SearchResponse response = client().prepareSearch("test8209")
.addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.MONTH)
.timeZone(DateTimeZone.forID("CET"))
.minDocCount(0))
.format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
.timeZone(ZoneId.of("CET")).minDocCount(0))
.get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
@ -1371,7 +1376,7 @@ public class DateHistogramIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch(indexDateUnmapped)
.addAggregation(
dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("YYYY-MM")
dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("yyyy-MM")
.minDocCount(0).extendedBounds(new ExtendedBounds("2018-01", "2018-01")))
.get();
assertSearchResponse(response);
@ -1393,15 +1398,19 @@ public class DateHistogramIT extends ESIntegTestCase {
indexRandom(true, client().prepareIndex(index, "type").setSource("d", "1477954800000"));
ensureSearchable(index);
SearchResponse response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d")
.dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin"))).get();
.dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))).get();
assertSearchResponse(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));
assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000"));
if (JavaVersion.current().getVersion().get(0) == 8 && histo.getBuckets().get(0).getKeyAsString().endsWith(".0")) {
assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000.0"));
} else {
assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000"));
}
assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L));
response = client().prepareSearch(index).addAggregation(dateHistogram("histo").field("d")
.dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("Europe/Berlin")).format("yyyy-MM-dd"))
.dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin")).format("yyyy-MM-dd"))
.get();
assertSearchResponse(response);
histo = response.getAggregations().get("histo");
@ -1422,7 +1431,7 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testDSTEndTransition() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setQuery(new MatchNoneQueryBuilder())
.addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("Europe/Oslo"))
.addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo"))
.dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds(
new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
.get();
@ -1430,9 +1439,12 @@ public class DateHistogramIT extends ESIntegTestCase {
Histogram histo = response.getAggregations().get("histo");
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(4));
assertThat(((DateTime) buckets.get(1).getKey()).getMillis() - ((DateTime) buckets.get(0).getKey()).getMillis(), equalTo(3600000L));
assertThat(((DateTime) buckets.get(2).getKey()).getMillis() - ((DateTime) buckets.get(1).getKey()).getMillis(), equalTo(3600000L));
assertThat(((DateTime) buckets.get(3).getKey()).getMillis() - ((DateTime) buckets.get(2).getKey()).getMillis(), equalTo(3600000L));
assertThat(((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli() -
((ZonedDateTime) buckets.get(0).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
assertThat(((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli() -
((ZonedDateTime) buckets.get(1).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
assertThat(((ZonedDateTime) buckets.get(3).getKey()).toInstant().toEpochMilli() -
((ZonedDateTime) buckets.get(2).getKey()).toInstant().toEpochMilli(), equalTo(3600000L));
}
/**
@ -1443,8 +1455,10 @@ public class DateHistogramIT extends ESIntegTestCase {
assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=date")
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
.get());
indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date(1, 1)),
client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date(2, 1)));
String date = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(1, 1));
String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1));
indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("d", date),
client().prepareIndex("cache_test_idx", "type", "2").setSource("d", date2));
// Make sure we are starting with a clear cache
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
@ -1514,7 +1528,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}
private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
DateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(DateTime[]::new);
ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
SearchResponse response = client()
.prepareSearch("sort_idx")
.setTypes("type")
@ -1544,7 +1558,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}
}
private DateTime key(Histogram.Bucket bucket) {
return (DateTime) bucket.getKey();
private ZonedDateTime key(Histogram.Bucket bucket) {
return (ZonedDateTime) bucket.getKey();
}
}

View File

@ -20,18 +20,19 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.List;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@ -50,9 +51,10 @@ import static org.hamcrest.core.IsNull.notNullValue;
public class DateHistogramOffsetIT extends ESIntegTestCase {
private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss";
private static final DateFormatter FORMATTER = DateFormatter.forPattern(DATE_FORMAT);
private DateTime date(String date) {
return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(date);
private ZonedDateTime date(String date) {
return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date));
}
@Before
@ -65,8 +67,9 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
internalCluster().wipeIndices("idx2");
}
private void prepareIndex(DateTime date, int numHours, int stepSizeHours, int idxIdStart)
throws IOException, InterruptedException, ExecutionException {
private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, int idxIdStart)
throws IOException, InterruptedException {
IndexRequestBuilder[] reqs = new IndexRequestBuilder[numHours];
for (int i = idxIdStart; i < idxIdStart + reqs.length; i++) {
reqs[i - idxIdStart] = client().prepareIndex("idx2", "type", "" + i)
@ -94,8 +97,8 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 2, 0, DateTimeZone.UTC), 2L);
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 2, 0, DateTimeZone.UTC), 3L);
checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 2, 0, 0, 0, ZoneOffset.UTC), 2L);
checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 2, 0, 0, 0, ZoneOffset.UTC), 3L);
}
public void testSingleValueWithNegativeOffset() throws Exception {
@ -116,8 +119,8 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
checkBucketFor(buckets.get(0), new DateTime(2014, 3, 9, 22, 0, DateTimeZone.UTC), 2L);
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 10, 22, 0, DateTimeZone.UTC), 3L);
checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 9, 22, 0, 0, 0, ZoneOffset.UTC), 2L);
checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 10, 22, 0, 0, 0, ZoneOffset.UTC), 3L);
}
/**
@ -143,11 +146,11 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(5));
checkBucketFor(buckets.get(0), new DateTime(2014, 3, 10, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(1), new DateTime(2014, 3, 11, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(2), new DateTime(2014, 3, 12, 6, 0, DateTimeZone.UTC), 0L);
checkBucketFor(buckets.get(3), new DateTime(2014, 3, 13, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(4), new DateTime(2014, 3, 14, 6, 0, DateTimeZone.UTC), 6L);
checkBucketFor(buckets.get(0), ZonedDateTime.of(2014, 3, 10, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
checkBucketFor(buckets.get(1), ZonedDateTime.of(2014, 3, 11, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
checkBucketFor(buckets.get(2), ZonedDateTime.of(2014, 3, 12, 6, 0, 0, 0, ZoneOffset.UTC), 0L);
checkBucketFor(buckets.get(3), ZonedDateTime.of(2014, 3, 13, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
checkBucketFor(buckets.get(4), ZonedDateTime.of(2014, 3, 14, 6, 0, 0, 0, ZoneOffset.UTC), 6L);
}
/**
@ -155,10 +158,10 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
* @param key the expected key
* @param expectedSize the expected size of the bucket
*/
private static void checkBucketFor(Histogram.Bucket bucket, DateTime key, long expectedSize) {
private static void checkBucketFor(Histogram.Bucket bucket, ZonedDateTime key, long expectedSize) {
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo(key.toString(DATE_FORMAT)));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getKeyAsString(), equalTo(FORMATTER.format(key)));
assertThat(((ZonedDateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(expectedSize));
}
}

View File

@ -18,9 +18,11 @@
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.bootstrap.JavaVersion;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
@ -33,9 +35,10 @@ import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.metrics.Sum;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -70,12 +73,12 @@ public class DateRangeIT extends ESIntegTestCase {
.endObject());
}
private static DateTime date(int month, int day) {
return date(month, day, DateTimeZone.UTC);
private static ZonedDateTime date(int month, int day) {
return date(month, day, ZoneOffset.UTC);
}
private static DateTime date(int month, int day, DateTimeZone timezone) {
return new DateTime(2012, month, day, 0, 0, timezone);
private static ZonedDateTime date(int month, int day, ZoneId timezone) {
return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, timezone);
}
private static int numDocs;
@ -128,7 +131,7 @@ public class DateRangeIT extends ESIntegTestCase {
.prepareSearch("idx")
.addAggregation(
rangeBuilder.addUnboundedTo("a long time ago", "now-50y").addRange("recently", "now-50y", "now-1y")
.addUnboundedFrom("last year", "now-1y").timeZone(DateTimeZone.forID("EST"))).get();
.addUnboundedFrom("last year", "now-1y").timeZone(ZoneId.of("Etc/GMT+5"))).get();
assertSearchResponse(response);
@ -176,8 +179,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -185,8 +188,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -194,8 +197,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@ -222,8 +225,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -231,8 +234,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -240,8 +243,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@ -269,8 +272,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -278,8 +281,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15-2012-03-15"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -287,19 +290,17 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
}
public void testSingleValueFieldWithDateMath() throws Exception {
DateTimeZone timezone = randomDateTimeZone();
int timeZoneOffset = timezone.getOffset(date(2, 15));
// if time zone is UTC (or equivalent), time zone suffix is "Z", else something like "+03:00", which we get with the "ZZ" format
String feb15Suffix = timeZoneOffset == 0 ? "Z" : date(2,15, timezone).toString("ZZ");
String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3,15, timezone).toString("ZZ");
ZoneId timezone = randomZone();
int timeZoneOffset = timezone.getRules().getOffset(date(2, 15).toInstant()).getTotalSeconds();
String suffix = timezone.equals(ZoneOffset.UTC) ? "Z" : timezone.getId();
long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L;
SearchResponse response = client().prepareSearch("idx")
@ -321,29 +322,29 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + feb15Suffix));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000" + suffix));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000" + suffix));
assertThat(bucket.getDocCount(), equalTo(expectedFirstBucketCount));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix +
"-2012-03-15T00:00:00.000" + mar15Suffix));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).toDateTime(DateTimeZone.UTC)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + feb15Suffix));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000" + suffix +
"-2012-03-15T00:00:00.000" + suffix));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000" + suffix));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000" + suffix));
assertThat(bucket.getDocCount(), equalTo(2L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix + "-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).toDateTime(DateTimeZone.UTC)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + mar15Suffix));
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000" + suffix + "-*"));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15, timezone).withZoneSameInstant(ZoneOffset.UTC)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000" + suffix));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L - expectedFirstBucketCount));
}
@ -369,8 +370,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r1"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -378,8 +379,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r2"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -387,8 +388,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r3"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@ -429,8 +430,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r1"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -444,8 +445,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r2"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -459,8 +460,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("r3"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@ -502,8 +503,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -511,8 +512,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(3L));
@ -520,8 +521,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
@ -557,8 +558,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(1L));
@ -566,8 +567,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -575,8 +576,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 1L));
@ -616,8 +617,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -625,8 +626,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -634,8 +635,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@ -675,8 +676,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -684,8 +685,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(3L));
@ -693,8 +694,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2L));
@ -723,8 +724,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@ -732,8 +733,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@ -741,8 +742,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0L));
@ -769,8 +770,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@ -778,8 +779,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(0L));
@ -787,8 +788,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0L));
@ -815,8 +816,8 @@ public class DateRangeIT extends ESIntegTestCase {
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("*-2012-02-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), nullValue());
assertThat(((DateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), nullValue());
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(2, 15)));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -824,8 +825,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-02-15T00:00:00.000Z-2012-03-15T00:00:00.000Z"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((DateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(2, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), equalTo(date(3, 15)));
assertThat(bucket.getFromAsString(), equalTo("2012-02-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getDocCount(), equalTo(2L));
@ -833,8 +834,8 @@ public class DateRangeIT extends ESIntegTestCase {
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat((String) bucket.getKey(), equalTo("2012-03-15T00:00:00.000Z-*"));
assertThat(((DateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((DateTime) bucket.getTo()), nullValue());
assertThat(((ZonedDateTime) bucket.getFrom()), equalTo(date(3, 15)));
assertThat(((ZonedDateTime) bucket.getTo()), nullValue());
assertThat(bucket.getFromAsString(), equalTo("2012-03-15T00:00:00.000Z"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4L));
@ -859,8 +860,8 @@ public class DateRangeIT extends ESIntegTestCase {
assertThat(dateRange.getName(), equalTo("date_range"));
assertThat(buckets.size(), is(1));
assertThat((String) buckets.get(0).getKey(), equalTo("0-1"));
assertThat(((DateTime) buckets.get(0).getFrom()).getMillis(), equalTo(0L));
assertThat(((DateTime) buckets.get(0).getTo()).getMillis(), equalTo(1L));
assertThat(((ZonedDateTime) buckets.get(0).getFrom()).toInstant().toEpochMilli(), equalTo(0L));
assertThat(((ZonedDateTime) buckets.get(0).getTo()).toInstant().toEpochMilli(), equalTo(1L));
assertThat(buckets.get(0).getDocCount(), equalTo(0L));
assertThat(buckets.get(0).getAggregations().asList().isEmpty(), is(true));
}
@ -903,7 +904,8 @@ public class DateRangeIT extends ESIntegTestCase {
params.put("fieldname", "date");
SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params))
.addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)))
.addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)))
.get();
assertSearchResponse(r);
@ -915,7 +917,8 @@ public class DateRangeIT extends ESIntegTestCase {
// To make sure that the cache is working test that a request not using
// a script is cached
r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(dateRange("foo").field("date")
.addRange(new DateTime(2012, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC), new DateTime(2013, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC)))
.addRange(ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2013, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC)))
.get();
assertSearchResponse(r);
@ -969,10 +972,9 @@ public class DateRangeIT extends ESIntegTestCase {
assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L);
// providing numeric input without format should throw an exception
Exception e = expectThrows(Exception.class, () -> client().prepareSearch(indexName).setSize(0)
ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000)).get());
Throwable cause = e.getCause();
assertThat(cause.getMessage(),
assertThat(e.getDetailedMessage(),
containsString("failed to parse date field [1000000] with format [strict_hour_minute_second]"));
}
@ -984,9 +986,9 @@ public class DateRangeIT extends ESIntegTestCase {
String indexName = "dateformat_numeric_test_idx";
assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second"));
indexRandom(true,
client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1000).endObject()),
client().prepareIndex(indexName, "type", "1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()),
client().prepareIndex(indexName, "type", "2").setSource(jsonBuilder().startObject().field("date", 2000).endObject()),
client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3000).endObject()));
client().prepareIndex(indexName, "type", "3").setSource(jsonBuilder().startObject().field("date", 3008).endObject()));
// using no format should work when to/from is compatible with format in
// mapping
@ -994,39 +996,39 @@ public class DateRangeIT extends ESIntegTestCase {
.addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
List<Bucket> buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
if (JavaVersion.current().getVersion().get(0) == 8) {
assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L);
} else {
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
}
// using no format should also work when and to/from are string values
searchResponse = client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
if (JavaVersion.current().getVersion().get(0) == 8) {
assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L);
} else {
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
}
// also e-notation should work, fractional parts should be truncated
searchResponse = client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
// also e-notation and floats provided as string also be truncated (see: #14641)
searchResponse = client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange("1.0e3", "3.0e3").addRange("3.0e3", "4.0e3")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
searchResponse = client().prepareSearch(indexName).setSize(0)
.addAggregation(dateRange("date_range").field("date").addRange("1000.123", "3000.8").addRange("3000.8", "4000.3")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
if (JavaVersion.current().getVersion().get(0) == 8) {
assertBucket(buckets.get(0), 2L, "1000.0-3000.0", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000.0-4000.0", 3000000L, 4000000L);
} else {
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
}
// using different format should work when to/from is compatible with
// format in aggregation
@ -1061,8 +1063,8 @@ public class DateRangeIT extends ESIntegTestCase {
private static void assertBucket(Bucket bucket, long bucketSize, String expectedKey, long expectedFrom, long expectedTo) {
assertThat(bucket.getDocCount(), equalTo(bucketSize));
assertThat((String) bucket.getKey(), equalTo(expectedKey));
assertThat(((DateTime) bucket.getFrom()).getMillis(), equalTo(expectedFrom));
assertThat(((DateTime) bucket.getTo()).getMillis(), equalTo(expectedTo));
assertThat(((ZonedDateTime) bucket.getFrom()).toInstant().toEpochMilli(), equalTo(expectedFrom));
assertThat(((ZonedDateTime) bucket.getTo()).toInstant().toEpochMilli(), equalTo(expectedTo));
assertThat(bucket.getAggregations().asList().isEmpty(), is(true));
}
}

View File

@ -65,7 +65,7 @@ public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregation
factory.missing(randomIntBetween(0, 10));
}
if (randomBoolean()) {
factory.timeZone(randomDateTimeZone());
factory.timeZone(randomZone());
}
return factory;
}

View File

@ -22,11 +22,10 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
@ -124,7 +123,7 @@ public class MinDocCountIT extends AbstractTermsTestCase {
double doubleTerm = longTerm * Math.PI;
ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC);
String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time);
String dateTerm = DateFormatter.forPattern("yyyy-MM-dd").format(time);
final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20);
for (int j = 0; j < frequency; ++j) {
indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()

View File

@ -42,7 +42,7 @@ public class CompositeAggregationBuilderTests extends BaseAggregationTestCase<Co
histo.interval(randomNonNegativeLong());
}
if (randomBoolean()) {
histo.timeZone(randomDateTimeZone());
histo.timeZone(randomZone());
}
if (randomBoolean()) {
histo.missingBucket(true);

View File

@ -39,6 +39,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
@ -57,12 +58,12 @@ import org.elasticsearch.search.aggregations.metrics.TopHits;
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.sort.SortOrder;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.net.InetAddress;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -1155,8 +1156,7 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
},
(result) -> {}
));
assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class));
assertThat(exc.getCause().getMessage(), containsString("Parse failure"));
assertThat(exc.getMessage(), containsString("failed to parse date field [1474329600000]"));
}
public void testWithDateHistogramAndTimeZone() throws IOException {
@ -1176,7 +1176,7 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
.field("date")
.dateHistogramInterval(DateHistogramInterval.days(1))
.timeZone(DateTimeZone.forOffsetHours(1));
.timeZone(ZoneOffset.ofHours(1));
return new CompositeAggregationBuilder("name", Collections.singletonList(histo));
},
(result) -> {
@ -1196,7 +1196,7 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
.field("date")
.dateHistogramInterval(DateHistogramInterval.days(1))
.timeZone(DateTimeZone.forOffsetHours(1));
.timeZone(ZoneOffset.ofHours(1));
return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
.aggregateAfter(createAfterKey("date", 1474326000000L));
@ -1835,6 +1835,6 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
}
private static long asLong(String dateTime) {
return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis();
return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}

View File

@ -30,10 +30,10 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import org.joda.time.DateTimeZone;
import org.junit.After;
import java.io.IOException;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -64,7 +64,7 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa
if (isLong) {
// we use specific format only for date histogram on a long/date field
if (randomBoolean()) {
return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
return new DocValueFormat.DateTime(DateFormatter.forPattern("epoch_second"), ZoneOffset.ofHours(1));
} else {
return DocValueFormat.RAW;
}

View File

@ -33,6 +33,7 @@ import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -41,11 +42,13 @@ import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
import org.elasticsearch.search.aggregations.metrics.InternalStats;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Assert;
import java.io.IOException;
import java.time.LocalDate;
import java.time.YearMonth;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -59,17 +62,17 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
private static final String DATE_FIELD = "date";
private static final String INSTANT_FIELD = "instant";
private static final List<DateTime> DATES_WITH_TIME = Arrays.asList(
new DateTime(2010, 3, 12, 1, 7, 45, DateTimeZone.UTC),
new DateTime(2010, 4, 27, 3, 43, 34, DateTimeZone.UTC),
new DateTime(2012, 5, 18, 4, 11, 0, DateTimeZone.UTC),
new DateTime(2013, 5, 29, 5, 11, 31, DateTimeZone.UTC),
new DateTime(2013, 10, 31, 8, 24, 5, DateTimeZone.UTC),
new DateTime(2015, 2, 13, 13, 9, 32, DateTimeZone.UTC),
new DateTime(2015, 6, 24, 13, 47, 43, DateTimeZone.UTC),
new DateTime(2015, 11, 13, 16, 14, 34, DateTimeZone.UTC),
new DateTime(2016, 3, 4, 17, 9, 50, DateTimeZone.UTC),
new DateTime(2017, 12, 12, 22, 55, 46, DateTimeZone.UTC));
private static final List<ZonedDateTime> DATES_WITH_TIME = Arrays.asList(
ZonedDateTime.of(2010, 3, 12, 1, 7, 45, 0, ZoneOffset.UTC),
ZonedDateTime.of(2010, 4, 27, 3, 43, 34, 0, ZoneOffset.UTC),
ZonedDateTime.of(2012, 5, 18, 4, 11, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2013, 5, 29, 5, 11, 31, 0, ZoneOffset.UTC),
ZonedDateTime.of(2013, 10, 31, 8, 24, 5, 0, ZoneOffset.UTC),
ZonedDateTime.of(2015, 2, 13, 13, 9, 32, 0, ZoneOffset.UTC),
ZonedDateTime.of(2015, 6, 24, 13, 47, 43, 0, ZoneOffset.UTC),
ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC),
ZonedDateTime.of(2016, 3, 4, 17, 9, 50, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC));
private static final Query DEFAULT_QUERY = new MatchAllDocsQuery();
@ -184,7 +187,7 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testNoDocs() throws IOException {
final List<DateTime> dates = Collections.emptyList();
final List<ZonedDateTime> dates = Collections.emptyList();
final Consumer<AutoDateHistogramAggregationBuilder> aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD);
testSearchCase(DEFAULT_QUERY, dates, aggregation,
@ -209,8 +212,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalYear() throws IOException {
final long start = new DateTime(DateTimeZone.UTC).withDate(2015, 1, 1).getMillis();
final long end = new DateTime(DateTimeZone.UTC).withDate(2017, 12, 31).getMillis();
final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end);
testSearchCase(rangeQuery, DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
@ -228,8 +233,8 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
testSearchAndReduceCase(rangeQuery, DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
histogram -> {
final DateTime startDate = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(startDate, 3);
expectedDocCount.put(startDate.plusYears(1), 1);
expectedDocCount.put(startDate.plusYears(2), 1);
@ -243,13 +248,13 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalMonth() throws IOException {
final List<DateTime> datesForMonthInterval = Arrays.asList(
new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 3, 4, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 3, 5, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 3, 6, 0, 0, 0, DateTimeZone.UTC));
final List<ZonedDateTime> datesForMonthInterval = Arrays.asList(
ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 3, 4, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 3, 5, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@ -263,7 +268,7 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
testSearchAndReduceCase(DEFAULT_QUERY, datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
histogram -> {
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForMonthInterval.get(0).withDayOfMonth(1), 1);
expectedDocCount.put(datesForMonthInterval.get(1).withDayOfMonth(1), 2);
expectedDocCount.put(datesForMonthInterval.get(3).withDayOfMonth(1), 3);
@ -287,15 +292,15 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalDay() throws IOException {
final List<DateTime> datesForDayInterval = Arrays.asList(
new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC));
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC));
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForDayInterval.get(0), 1);
expectedDocCount.put(datesForDayInterval.get(1), 2);
expectedDocCount.put(datesForDayInterval.get(3), 3);
@ -321,16 +326,16 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalDayWithTZ() throws IOException {
final List<DateTime> datesForDayInterval = Arrays.asList(
new DateTime(2017, 2, 1, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 2, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 3, 0, 0, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 5, 0, 0, 0, DateTimeZone.UTC));
final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForDayInterval,
aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> {
aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-01-31T23:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T23:00:00.000-01:00", 2);
@ -343,7 +348,7 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
assertTrue(AggregationInspectionHelper.hasValue(histogram));
});
testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval,
aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> {
aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-01-31T00:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2);
@ -358,17 +363,17 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalHour() throws IOException {
final List<DateTime> datesForHourInterval = Arrays.asList(
new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC));
final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
histogram -> {
@ -384,13 +389,13 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
histogram -> {
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 2);
expectedDocCount.put(datesForHourInterval.get(2).withMinuteOfHour(0), 1);
expectedDocCount.put(datesForHourInterval.get(3).withMinuteOfHour(0), 1);
expectedDocCount.put(datesForHourInterval.get(4).withMinuteOfHour(0), 2);
expectedDocCount.put(datesForHourInterval.get(6).withMinuteOfHour(0), 1);
expectedDocCount.put(datesForHourInterval.get(7).withMinuteOfHour(0), 3);
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 2);
expectedDocCount.put(datesForHourInterval.get(2).withMinute(0), 1);
expectedDocCount.put(datesForHourInterval.get(3).withMinute(0), 1);
expectedDocCount.put(datesForHourInterval.get(4).withMinute(0), 2);
expectedDocCount.put(datesForHourInterval.get(6).withMinute(0), 1);
expectedDocCount.put(datesForHourInterval.get(7).withMinute(0), 3);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(8, buckets.size());
buckets.forEach(bucket ->
@ -400,10 +405,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD),
histogram -> {
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForHourInterval.get(0).withMinuteOfHour(0), 3);
expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinuteOfHour(0), 3);
expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinuteOfHour(0), 4);
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForHourInterval.get(0).withMinute(0), 3);
expectedDocCount.put(datesForHourInterval.get(0).plusHours(3).withMinute(0), 3);
expectedDocCount.put(datesForHourInterval.get(0).plusHours(6).withMinute(0), 4);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(expectedDocCount.size(), buckets.size());
buckets.forEach(bucket ->
@ -413,22 +418,23 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalHourWithTZ() throws IOException {
final List<DateTime> datesForHourInterval = Arrays.asList(
new DateTime(2017, 2, 1, 9, 2, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 9, 35, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 10, 15, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 13, 6, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 14, 4, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 14, 5, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 15, 59, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 16, 6, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 16, 48, 0, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 16, 59, 0, DateTimeZone.UTC));
final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)),
aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
histogram -> {
final List<String> dateStrings = datesForHourInterval.stream()
.map(dateTime -> dateTime.withZone(DateTimeZone.forOffsetHours(-1)).toString()).collect(Collectors.toList());
.map(dateTime -> DateFormatter.forPattern("strict_date_time")
.format(dateTime.withZoneSameInstant(ZoneOffset.ofHours(-1)))).collect(Collectors.toList());
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(datesForHourInterval.size(), buckets.size());
for (int i = 0; i < buckets.size(); i++) {
@ -439,7 +445,7 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
);
testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)),
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
histogram -> {
final Map<String, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put("2017-02-01T08:00:00.000-01:00", 2);
@ -458,10 +464,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
public void testRandomSecondIntervals() throws IOException {
final int length = 120;
final List<DateTime> dataset = new ArrayList<>(length);
final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, 0, DateTimeZone.UTC);
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final DateTime date = startDate.plusSeconds(i);
final ZonedDateTime date = startDate.plusSeconds(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
@ -487,10 +493,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
public void testRandomMinuteIntervals() throws IOException {
final int length = 120;
final List<DateTime> dataset = new ArrayList<>(length);
final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final DateTime date = startDate.plusMinutes(i);
final ZonedDateTime date = startDate.plusMinutes(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
@ -516,10 +522,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
public void testRandomHourIntervals() throws IOException {
final int length = 72;
final List<DateTime> dataset = new ArrayList<>(length);
final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final DateTime date = startDate.plusHours(i);
final ZonedDateTime date = startDate.plusHours(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
@ -544,10 +550,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
public void testRandomDayIntervals() throws IOException {
final int length = 140;
final List<DateTime> dataset = new ArrayList<>(length);
final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final DateTime date = startDate.plusDays(i);
final ZonedDateTime date = startDate.plusDays(i);
dataset.add(date);
}
final int randomChoice = randomIntBetween(1, 3);
@ -583,17 +589,17 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
final int randomIndex = randomInt(2);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusMonths(randomIndex), bucket.getKey());
assertEquals(startDate.plusMonths(randomIndex).dayOfMonth().getMaximumValue(), bucket.getDocCount());
assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount());
});
}
}
public void testRandomMonthIntervals() throws IOException {
final int length = 60;
final List<DateTime> dataset = new ArrayList<>(length);
final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final DateTime date = startDate.plusMonths(i);
final ZonedDateTime date = startDate.plusMonths(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
@ -617,10 +623,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
public void testRandomYearIntervals() throws IOException {
final int length = 300;
final List<DateTime> dataset = new ArrayList<>(length);
final DateTime startDate = new DateTime(2017, 1, 1, 0, 0, DateTimeZone.UTC);
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final DateTime date = startDate.plusYears(i);
final ZonedDateTime date = startDate.plusYears(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
@ -646,12 +652,12 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalMinute() throws IOException {
final List<DateTime> datesForMinuteInterval = Arrays.asList(
new DateTime(2017, 2, 1, 9, 2, 35, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 9, 2, 59, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 9, 15, 37, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 9, 16, 4, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 9, 16, 42, DateTimeZone.UTC));
final List<ZonedDateTime> datesForMinuteInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 9, 2, 35, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 2, 59, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 15, 37, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 16, 4, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC));
testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
@ -668,10 +674,10 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval,
aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD),
histogram -> {
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForMinuteInterval.get(0).withSecondOfMinute(0), 2);
expectedDocCount.put(datesForMinuteInterval.get(2).withSecondOfMinute(0), 1);
expectedDocCount.put(datesForMinuteInterval.get(3).withSecondOfMinute(0), 2);
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(datesForMinuteInterval.get(0).withSecond(0), 2);
expectedDocCount.put(datesForMinuteInterval.get(2).withSecond(0), 1);
expectedDocCount.put(datesForMinuteInterval.get(3).withSecond(0), 2);
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
assertEquals(15, buckets.size());
buckets.forEach(bucket ->
@ -681,15 +687,15 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
}
public void testIntervalSecond() throws IOException {
final List<DateTime> datesForSecondInterval = Arrays.asList(
new DateTime(2017, 2, 1, 0, 0, 5, 15, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 0, 0, 7, 299, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 0, 0, 7, 74, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 0, 0, 11, 688, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 0, 0, 11, 210, DateTimeZone.UTC),
new DateTime(2017, 2, 1, 0, 0, 11, 380, DateTimeZone.UTC));
final DateTime startDate = datesForSecondInterval.get(0).withMillisOfSecond(0);
final Map<DateTime, Integer> expectedDocCount = new HashMap<>();
final List<ZonedDateTime> datesForSecondInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 0, 0, 5, 15, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 299, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 74, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 688, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 210, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380, ZoneOffset.UTC));
final ZonedDateTime startDate = datesForSecondInterval.get(0).withNano(0);
final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
expectedDocCount.put(startDate, 1);
expectedDocCount.put(startDate.plusSeconds(2), 2);
expectedDocCount.put(startDate.plusSeconds(6), 3);
@ -712,19 +718,19 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
);
}
private void testSearchCase(final Query query, final List<DateTime> dataset,
private void testSearchCase(final Query query, final List<ZonedDateTime> dataset,
final Consumer<AutoDateHistogramAggregationBuilder> configure,
final Consumer<InternalAutoDateHistogram> verify) throws IOException {
executeTestCase(false, query, dataset, configure, verify);
}
private void testSearchAndReduceCase(final Query query, final List<DateTime> dataset,
private void testSearchAndReduceCase(final Query query, final List<ZonedDateTime> dataset,
final Consumer<AutoDateHistogramAggregationBuilder> configure,
final Consumer<InternalAutoDateHistogram> verify) throws IOException {
executeTestCase(true, query, dataset, configure, verify);
}
private void testBothCases(final Query query, final List<DateTime> dataset,
private void testBothCases(final Query query, final List<ZonedDateTime> dataset,
final Consumer<AutoDateHistogramAggregationBuilder> configure,
final Consumer<InternalAutoDateHistogram> verify) throws IOException {
executeTestCase(false, query, dataset, configure, verify);
@ -745,18 +751,18 @@ public class AutoDateHistogramAggregatorTests extends AggregatorTestCase {
);
}
private void executeTestCase(final boolean reduced, final Query query, final List<DateTime> dataset,
private void executeTestCase(final boolean reduced, final Query query, final List<ZonedDateTime> dataset,
final Consumer<AutoDateHistogramAggregationBuilder> configure,
final Consumer<InternalAutoDateHistogram> verify) throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
final Document document = new Document();
for (final DateTime date : dataset) {
for (final ZonedDateTime date : dataset) {
if (frequently()) {
indexWriter.commit();
}
final long instant = date.getMillis();
final long instant = date.toInstant().toEpochMilli();
document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
document.add(new LongPoint(INSTANT_FIELD, instant));
indexWriter.addDocument(document);

View File

@ -30,6 +30,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
@ -474,6 +475,6 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
}
private static long asLong(String dateTime) {
return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseJoda(dateTime).getMillis();
return DateFormatters.toZonedDateTime(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}

Some files were not shown because too many files have changed in this diff Show More