Date Parsing: Add parsing for epoch and epoch in milliseconds

This commit changes the date handling. First and foremost, Elasticsearch
no longer tries to convert every date to a unix timestamp before falling
back to the configured date format. This allows dates like `2015121212`
to be parsed correctly.

Instead, epoch parsing is now explicit via the new `epoch_second` and
`epoch_millis` date formats. As a consequence, the default date format is
now `epoch_millis||dateOptionalTime` to remain backwards compatible.
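
For illustration, a minimal sketch in plain Joda-Time of what this means
(the sample values match the tests added in this commit; the class name is
hypothetical):

--------------------------------------------------
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class EpochVsFormatSketch {
    public static void main(String[] args) {
        // A numeric string is now parsed by the mapped format instead of
        // being treated as a unix timestamp.
        DateTimeFormatter yyyyMMddHH = DateTimeFormat.forPattern("yyyyMMddHH").withZoneUTC();
        System.out.println(yyyyMMddHH.parseDateTime("2015121212")); // 2015-12-12T12:00:00.000Z

        // Epoch input is only recognized when an epoch format is configured;
        // here plain Joda-Time stands in for what `epoch_millis` does.
        System.out.println(new DateTime(1433239200000L, DateTimeZone.UTC)); // 2015-06-02T10:00:00.000Z
    }
}
--------------------------------------------------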

Closes #5328
Relates #10971
Alexander Reelsen 2015-06-03 18:07:47 +02:00
parent 5fd96d9371
commit 01e8eaf181
18 changed files with 228 additions and 100 deletions


@ -198,6 +198,11 @@ year.
|`year_month_day`|A formatter for a four digit year, two digit month of
year, and two digit day of month.
|`epoch_second`|A formatter for the number of seconds since the epoch.
|`epoch_millis`|A formatter for the number of milliseconds since
the epoch.
|=======================================================================
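
For illustration, a hedged sketch of how the two formats relate, using the
`Joda.forPattern` helper extended in this commit (values taken from the Joda
tests below; the wrapper class is hypothetical):

--------------------------------------------------
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.joda.time.DateTime;

class EpochFormatsSketch {
    static void parseEpochs() {
        // 1433144433655 ms since the epoch == 1433144433 s == 2015-06-01T07:40:33(.655)Z
        FormatDateTimeFormatter seconds = Joda.forPattern("epoch_second");
        FormatDateTimeFormatter millis = Joda.forPattern("epoch_millis");

        DateTime fromSeconds = seconds.parser().parseDateTime("1433144433");
        DateTime fromMillis = millis.parser().parseDateTime("1433144433655");
        // fromSeconds.getMillisOfSecond() == 0, fromMillis.getMillisOfSecond() == 655
    }
}
--------------------------------------------------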
[float]


@ -79,7 +79,7 @@ format>> used to parse the provided timestamp value. For example:
}
--------------------------------------------------
Note, the default format is `dateOptionalTime`. The timestamp value will
Note, the default format is `epoch_millis||dateOptionalTime`. The timestamp value will
first be parsed as a number and if it fails the format will be tried.
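
A hedged sketch of that fallback chain with the new default, again via the
`Joda.forPattern` helper (sample values are assumptions based on the
date-math tests below; the wrapper class is hypothetical):

--------------------------------------------------
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;

class TimestampFallbackSketch {
    static void parse() {
        FormatDateTimeFormatter fmt = Joda.forPattern("epoch_millis||dateOptionalTime");

        // A purely numeric value is matched by the epoch_millis parser...
        long fromEpoch = fmt.parser().parseMillis("1418248078000");
        // ...anything else falls through to dateOptionalTime.
        long fromDate = fmt.parser().parseMillis("2014-12-10T21:47:58.000Z");
        // fromEpoch == fromDate == 1418248078000
    }
}
--------------------------------------------------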
[float]


@ -349,7 +349,7 @@ date type:
Defaults to the property/field name.
|`format` |The <<mapping-date-format,date
format>>. Defaults to `dateOptionalTime`.
format>>. Defaults to `epoch_millis||dateOptionalTime`.
|`store` |Set to `true` to store actual field in the index, `false` to not
store it. Defaults to `false` (note, the JSON document itself is stored,


@ -42,8 +42,8 @@ and will use the matching format as its format attribute. The date
format itself is explained
<<mapping-date-format,here>>.
The default formats are: `dateOptionalTime` (ISO) and
`yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z`.
The default formats are: `dateOptionalTime` (ISO),
`yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z` and `epoch_millis`.
*Note:* `dynamic_date_formats` are used *only* for dynamically added
date fields, not for `date` fields that you specify in your mapping.


@ -32,6 +32,11 @@ public class TimestampParsingException extends ElasticsearchException {
this.timestamp = timestamp;
}
public TimestampParsingException(String timestamp, Throwable cause) {
super("failed to parse timestamp [" + timestamp + "]", cause);
this.timestamp = timestamp;
}
public String timestamp() {
return timestamp;
}


@ -161,19 +161,11 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
public static class Timestamp {
public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException {
long ts;
try {
// if we manage to parse it, its a millisecond timestamp, just return the string as is
ts = Long.parseLong(timestampAsString);
return timestampAsString;
} catch (NumberFormatException e) {
try {
ts = dateTimeFormatter.parser().parseMillis(timestampAsString);
} catch (RuntimeException e1) {
throw new TimestampParsingException(timestampAsString);
}
return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString));
} catch (RuntimeException e) {
throw new TimestampParsingException(timestampAsString, e);
}
return Long.toString(ts);
}
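
In short, every `_timestamp` value, numeric or not, now runs through the
configured formatter and is normalized to a millisecond string; failures
carry the original cause. A hedged usage sketch (the composite format is the
new default introduced by this commit; the wrapper class is hypothetical):

--------------------------------------------------
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;

class TimestampParsingSketch {
    static void parse() {
        FormatDateTimeFormatter fmt = Joda.forPattern("epoch_millis||dateOptionalTime");
        // Numeric input is handled by epoch_millis, anything else by dateOptionalTime.
        String a = MappingMetaData.Timestamp.parseStringTimestamp("1418248078000", fmt);            // "1418248078000"
        String b = MappingMetaData.Timestamp.parseStringTimestamp("2014-12-10T21:47:58.000Z", fmt); // "1418248078000"
        // Unparseable input now throws TimestampParsingException with the cause attached.
    }
}
--------------------------------------------------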


@ -19,14 +19,14 @@
package org.elasticsearch.common.joda;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* A parser for date/time formatted text with optional date math.
@ -38,13 +38,10 @@ import java.util.concurrent.TimeUnit;
public class DateMathParser {
private final FormatDateTimeFormatter dateTimeFormatter;
private final TimeUnit timeUnit;
public DateMathParser(FormatDateTimeFormatter dateTimeFormatter, TimeUnit timeUnit) {
if (dateTimeFormatter == null) throw new NullPointerException();
if (timeUnit == null) throw new NullPointerException();
public DateMathParser(FormatDateTimeFormatter dateTimeFormatter) {
checkNotNull(dateTimeFormatter);
this.dateTimeFormatter = dateTimeFormatter;
this.timeUnit = timeUnit;
}
public long parse(String text, Callable<Long> now) {
@ -195,17 +192,6 @@ public class DateMathParser {
}
private long parseDateTime(String value, DateTimeZone timeZone) {
// first check for timestamp
if (value.length() > 4 && StringUtils.isNumeric(value)) {
try {
long time = Long.parseLong(value);
return timeUnit.toMillis(time);
} catch (NumberFormatException e) {
throw new ElasticsearchParseException("failed to parse date field [" + value + "] as timestamp", e);
}
}
DateTimeFormatter parser = dateTimeFormatter.parser();
if (timeZone != null) {
parser = parser.withZone(timeZone);


@ -27,6 +27,7 @@ import org.joda.time.field.ScaledDurationField;
import org.joda.time.format.*;
import java.util.Locale;
import java.util.regex.Pattern;
/**
*
@ -133,6 +134,10 @@ public class Joda {
formatter = ISODateTimeFormat.yearMonth();
} else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) {
formatter = ISODateTimeFormat.yearMonthDay();
} else if ("epoch_second".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimeParser(false)).toFormatter();
} else if ("epoch_millis".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimeParser(true)).toFormatter();
} else if (Strings.hasLength(input) && input.contains("||")) {
String[] formats = Strings.delimitedListToStringArray(input, "||");
DateTimeParser[] parsers = new DateTimeParser[formats.length];
@ -192,4 +197,50 @@ public class Joda {
return new OffsetDateTimeField(new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1);
}
};
public static class EpochTimeParser implements DateTimeParser {
private static final Pattern MILLI_SECOND_PRECISION_PATTERN = Pattern.compile("^\\d{1,13}$");
private static final Pattern SECOND_PRECISION_PATTERN = Pattern.compile("^\\d{1,10}$");
private final boolean hasMilliSecondPrecision;
private final Pattern pattern;
public EpochTimeParser(boolean hasMilliSecondPrecision) {
this.hasMilliSecondPrecision = hasMilliSecondPrecision;
this.pattern = hasMilliSecondPrecision ? MILLI_SECOND_PRECISION_PATTERN : SECOND_PRECISION_PATTERN;
}
@Override
public int estimateParsedLength() {
return hasMilliSecondPrecision ? 13 : 10;
}
@Override
public int parseInto(DateTimeParserBucket bucket, String text, int position) {
if (text.length() > estimateParsedLength() ||
// timestamps have to have UTC timezone
bucket.getZone() != DateTimeZone.UTC ||
pattern.matcher(text).matches() == false) {
return -1;
}
int factor = hasMilliSecondPrecision ? 1 : 1000;
try {
long millis = Long.valueOf(text) * factor;
DateTime dt = new DateTime(millis, DateTimeZone.UTC);
bucket.saveField(DateTimeFieldType.year(), dt.getYear());
bucket.saveField(DateTimeFieldType.monthOfYear(), dt.getMonthOfYear());
bucket.saveField(DateTimeFieldType.dayOfMonth(), dt.getDayOfMonth());
bucket.saveField(DateTimeFieldType.hourOfDay(), dt.getHourOfDay());
bucket.saveField(DateTimeFieldType.minuteOfHour(), dt.getMinuteOfHour());
bucket.saveField(DateTimeFieldType.secondOfMinute(), dt.getSecondOfMinute());
bucket.saveField(DateTimeFieldType.millisOfSecond(), dt.getMillisOfSecond());
bucket.setZone(DateTimeZone.UTC);
} catch (Exception e) {
return -1;
}
return text.length();
}
};
}
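
The parsers accept only plain digit strings in UTC, capped at 10 digits for
seconds and 13 for milliseconds. A hedged sketch of what is accepted and
rejected, mirroring `SimpleJodaTests` below (the wrapper class is
hypothetical):

--------------------------------------------------
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;

class EpochParserLimitsSketch {
    static void limits() {
        FormatDateTimeFormatter millis = Joda.forPattern("epoch_millis");

        // Accepted: up to 13 digits, always interpreted in UTC.
        millis.parser().parseMillis("1433144433655");

        // Rejected (parseInto returns -1, so Joda throws IllegalArgumentException):
        //  - more digits than the precision allows, e.g. "12345678901234"
        //  - non-numeric input, e.g. "invalid date"
        //  - a parser time zone other than UTC
    }
}
--------------------------------------------------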


@ -46,12 +46,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.internal.SearchContext;
@ -223,7 +218,7 @@ public class DateFieldMapper extends NumberFieldMapper {
protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER;
protected TimeUnit timeUnit = Defaults.TIME_UNIT;
protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter);
public DateFieldType() {}
@ -245,7 +240,7 @@ public class DateFieldMapper extends NumberFieldMapper {
public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) {
checkIfFrozen();
this.dateTimeFormatter = dateTimeFormatter;
this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
this.dateMathParser = new DateMathParser(dateTimeFormatter);
}
public TimeUnit timeUnit() {
@ -255,7 +250,7 @@ public class DateFieldMapper extends NumberFieldMapper {
public void setTimeUnit(TimeUnit timeUnit) {
checkIfFrozen();
this.timeUnit = timeUnit;
this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit);
this.dateMathParser = new DateMathParser(dateTimeFormatter);
}
protected DateMathParser dateMathParser() {
@ -365,9 +360,6 @@ public class DateFieldMapper extends NumberFieldMapper {
}
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
if (value instanceof Number) {
return ((Number) value).longValue();
}
DateMathParser dateParser = dateMathParser();
if (forcedDateParser != null) {
dateParser = forcedDateParser;
@ -434,17 +426,12 @@ public class DateFieldMapper extends NumberFieldMapper {
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
String dateAsString = null;
Long value = null;
float boost = this.fieldType.boost();
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue instanceof Number) {
value = ((Number) externalValue).longValue();
} else {
dateAsString = (String) externalValue;
if (dateAsString == null) {
dateAsString = nullValue;
}
dateAsString = (String) externalValue;
if (dateAsString == null) {
dateAsString = nullValue;
}
} else {
XContentParser parser = context.parser();
@ -452,7 +439,7 @@ public class DateFieldMapper extends NumberFieldMapper {
if (token == XContentParser.Token.VALUE_NULL) {
dateAsString = nullValue;
} else if (token == XContentParser.Token.VALUE_NUMBER) {
value = parser.longValue(coerce.value());
dateAsString = parser.text();
} else if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -462,8 +449,6 @@ public class DateFieldMapper extends NumberFieldMapper {
if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
if (token == XContentParser.Token.VALUE_NULL) {
dateAsString = nullValue;
} else if (token == XContentParser.Token.VALUE_NUMBER) {
value = parser.longValue(coerce.value());
} else {
dateAsString = parser.text();
}
@ -479,14 +464,12 @@ public class DateFieldMapper extends NumberFieldMapper {
}
}
Long value = null;
if (dateAsString != null) {
assert value == null;
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType.names().fullName(), dateAsString, boost);
}
value = fieldType().parseStringValue(dateAsString);
} else if (value != null) {
value = ((DateFieldType)fieldType).timeUnit().toMillis(value);
}
if (value != null) {


@ -58,7 +58,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
public static final String NAME = "_timestamp";
public static final String CONTENT_TYPE = "_timestamp";
public static final String DEFAULT_DATE_TIME_FORMAT = "dateOptionalTime";
public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||dateOptionalTime";
public static class Defaults extends DateFieldMapper.Defaults {
public static final String NAME = "_timestamp";


@ -102,7 +102,7 @@ public class RangeQueryParser implements QueryParser {
} else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) {
timeZone = DateTimeZone.forID(parser.text());
} else if ("format".equals(currentFieldName)) {
forcedDateParser = new DateMathParser(Joda.forPattern(parser.text()), DateFieldMapper.Defaults.TIME_UNIT);
forcedDateParser = new DateMathParser(Joda.forPattern(parser.text()));
} else {
throw new QueryParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]");
}
@ -123,11 +123,6 @@ public class RangeQueryParser implements QueryParser {
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
if (mapper instanceof DateFieldMapper) {
if ((from instanceof Number || to instanceof Number) && timeZone != null) {
throw new QueryParsingException(parseContext,
"[range] time_zone when using ms since epoch format as it's UTC based can not be applied to [" + fieldName
+ "]");
}
query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext);
} else {
if (timeZone != null) {
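
With that check removed, numeric bounds and `time_zone` can be combined on
date fields, since numbers are now parsed through the field's date format
like any other value. A hedged sketch matching the count test added below
(the wrapper class is hypothetical):

--------------------------------------------------
import org.elasticsearch.index.query.QueryBuilders;

class RangeQuerySketch {
    static void build() {
        // For a date field mapped with format "yyyyMMddHH" this matches all of 2015.
        QueryBuilders.rangeQuery("date_field")
                .from(2015010100L)
                .to(2015123123L)
                .timeZone("UTC");
    }
}
--------------------------------------------------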


@ -68,7 +68,7 @@ public class ValueFormat {
public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT);
public static DateTime format(String format) {
return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format, DateFieldMapper.Defaults.TIME_UNIT));
return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format));
}
public static DateTime mapper(DateFieldMapper mapper) {


@ -32,7 +32,6 @@ import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Locale;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
/**
*
@ -81,12 +80,12 @@ public interface ValueParser {
*/
static class DateMath implements ValueParser {
public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, DateFieldMapper.Defaults.TIME_UNIT));
public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER));
private DateMathParser parser;
public DateMath(String format, TimeUnit timeUnit) {
this(new DateMathParser(Joda.forPattern(format), timeUnit));
public DateMath(String format) {
this(new DateMathParser(Joda.forPattern(format)));
}
public DateMath(DateMathParser parser) {
@ -110,7 +109,7 @@ public interface ValueParser {
}
public static DateMath mapper(DateFieldMapper mapper) {
return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT));
return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter()));
}
}


@ -23,16 +23,18 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.joda.time.DateTimeZone;
import org.junit.Test;
import java.util.TimeZone;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.hamcrest.Matchers.equalTo;
public class DateMathParserTests extends ElasticsearchTestCase {
FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime");
DateMathParser parser = new DateMathParser(formatter, TimeUnit.MILLISECONDS);
FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis");
DateMathParser parser = new DateMathParser(formatter);
private static Callable<Long> callable(final long value) {
return new Callable<Long>() {
@ -195,25 +197,22 @@ public class DateMathParserTests extends ElasticsearchTestCase {
public void testTimestamps() {
assertDateMathEquals("1418248078000", "2014-12-10T21:47:58.000");
// timezone does not affect timestamps
assertDateMathEquals("1418248078000", "2014-12-10T21:47:58.000", 0, false, DateTimeZone.forID("-08:00"));
// datemath still works on timestamps
assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000");
// also check other time units
DateMathParser parser = new DateMathParser(Joda.forPattern("dateOptionalTime"), TimeUnit.SECONDS);
DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_second||dateOptionalTime"));
long datetime = parser.parse("1418248078", callable(0));
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
// a timestamp before 10000 is a year
assertDateMathEquals("9999", "9999-01-01T00:00:00.000");
// 10000 is the first timestamp
assertDateMathEquals("10000", "1970-01-01T00:00:10.000");
// 10000 is also a year, breaking bwc, used to be a timestamp
assertDateMathEquals("10000", "10000-01-01T00:00:00.000");
// but 10000 with T is still a date format
assertDateMathEquals("10000T", "10000-01-01T00:00:00.000");
}
void assertParseException(String msg, String date, String exc) {
try {
parser.parse(date, callable(0));
@ -232,7 +231,7 @@ public class DateMathParserTests extends ElasticsearchTestCase {
}
public void testIllegalDateFormat() {
assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "timestamp");
assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "failed to parse date field");
assertParseException("Expected bad date format exception", "123bogus", "with format");
}
@ -250,4 +249,10 @@ public class DateMathParserTests extends ElasticsearchTestCase {
parser.parse("now/d", now, false, null);
assertTrue(called.get());
}
@Test(expected = ElasticsearchParseException.class)
public void testThatUnixTimestampMayNotHaveTimeZone() {
DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_millis"));
parser.parse("1234567890123", callable(42), false, DateTimeZone.forTimeZone(TimeZone.getTimeZone("CET")));
}
}


@ -22,6 +22,7 @@ package org.elasticsearch.count.simple;
import org.apache.lucene.util.Constants;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
@ -39,6 +40,7 @@ import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.is;
public class SimpleCountTests extends ElasticsearchIntegrationTest {
@ -177,4 +179,46 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest {
assertHitCount(countResponse, 20l);
}
}
@Test
public void testThatNonEpochDatesCanBeSearch() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1",
jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject()
.endObject().endObject()));
ensureGreen("test");
XContentBuilder document = jsonBuilder()
.startObject()
.field("date_field", "2015060210")
.endObject();
assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true));
document = jsonBuilder()
.startObject()
.field("date_field", "2014060210")
.endObject();
assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true));
// this is a timestamp in 2015 and should not be returned in counting when filtering by year
document = jsonBuilder()
.startObject()
.field("date_field", "1433236702")
.endObject();
assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true));
refresh();
assertHitCount(client().prepareCount("test").get(), 3);
CountResponse countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from("2015010100").to("2015123123")).get();
assertHitCount(countResponse, 1);
countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from(2015010100).to(2015123123)).get();
assertHitCount(countResponse, 1);
countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from(2015010100).to(2015123123).timeZone("UTC")).get();
assertHitCount(countResponse, 1);
}
}


@ -23,6 +23,7 @@ import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.format.*;
@ -248,6 +249,40 @@ public class SimpleJodaTests extends ElasticsearchTestCase {
assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis()));
}
@Test
public void testThatEpochsInSecondsCanBeParsed() {
boolean parseMilliSeconds = randomBoolean();
// epoch: 1433144433655 => date: Mon Jun 1 09:40:33.655 CEST 2015
FormatDateTimeFormatter formatter = Joda.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second");
DateTime dateTime = formatter.parser().parseDateTime(parseMilliSeconds ? "1433144433655" : "1433144433");
assertThat(dateTime.getYear(), is(2015));
assertThat(dateTime.getDayOfMonth(), is(1));
assertThat(dateTime.getMonthOfYear(), is(6));
assertThat(dateTime.getHourOfDay(), is(7)); // utc timezone, +2 offset due to CEST
assertThat(dateTime.getMinuteOfHour(), is(40));
assertThat(dateTime.getSecondOfMinute(), is(33));
if (parseMilliSeconds) {
assertThat(dateTime.getMillisOfSecond(), is(655));
} else {
assertThat(dateTime.getMillisOfSecond(), is(0));
}
}
@Test(expected = IllegalArgumentException.class)
public void testForInvalidDatesInEpochSecond() {
FormatDateTimeFormatter formatter = Joda.forPattern("epoch_second");
formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901", "12345678901234"));
}
@Test(expected = IllegalArgumentException.class)
public void testForInvalidDatesInEpochMillis() {
FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis");
formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901234"));
}
private long utcTimeInMillis(String time) {
return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time);
}


@ -25,6 +25,7 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.util.Constants;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.LocaleUtils;
@ -33,13 +34,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
@ -51,21 +47,12 @@ import org.joda.time.DateTimeZone;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.*;
import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.index.mapper.string.SimpleStringMappingTests.docValuesType;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.*;
public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
@ -439,4 +426,31 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.bytes());
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L));
}
public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper("type", mapping);
XContentBuilder document = XContentFactory.jsonBuilder()
.startObject()
.field("date_field", "2015060210")
.endObject();
ParsedDocument doc = defaultMapper.parse("type", "1", document.bytes());
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L));
IndexResponse indexResponse = client().prepareIndex("test", "test").setSource(document).get();
assertThat(indexResponse.isCreated(), is(true));
// integers should always be parsed as well... cannot be sure it is a unix timestamp only
doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("date_field", 2015060210)
.endObject()
.bytes());
assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L));
indexResponse = client().prepareIndex("test", "test").setSource(document).get();
assertThat(indexResponse.isCreated(), is(true));
}
}


@ -775,4 +775,18 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY")), request.timestamp());
assertNull(docMapper.parse("type", "1", doc.bytes()).rootDoc().get("_timestamp"));
}
public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").field("path", "custom_timestamp").endObject()
.endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 2015060210).endObject();
IndexRequest request = new IndexRequest("test", "type", "1").source(doc);
MappingMetaData mappingMetaData = new MappingMetaData(docMapper);
request.process(MetaData.builder().build(), mappingMetaData, true, "test");
assertThat(request.timestamp(), is("1433239200000"));
}
}