Dates: Allow for negative unix timestamps

This fixes an issue so that negative unix timestamps can be parsed.
A dedicated printer for epochs has been added, instead of only having a parser.
Added docs noting that only unix timestamps of up to 10 (epoch_second) or 13 (epoch_millis) characters are supported.
Added a note to the upgrade documentation.

Fixes #11478
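As a rough illustration of the behaviour after this change (a sketch based on the tests below; the wrapper class and exact package imports are assumptions, not part of this commit):

import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.joda.time.DateTime;

public class NegativeEpochExample {
    public static void main(String[] args) {
        FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis");

        // negative unix timestamps (dates before 1970-01-01T00:00:00Z) are now accepted
        DateTime dateTime = formatter.parser().parseDateTime("-10000"); // 1969-12-31T23:59:50Z

        // the epoch formats now also have a printer, not only a parser
        System.out.println(formatter.printer().print(dateTime)); // prints -10000
    }
}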
This commit is contained in:
Alexander Reelsen 2015-06-22 11:56:31 +02:00
parent 895a7dd03c
commit 38ddc8159c
5 changed files with 173 additions and 68 deletions


@ -26,6 +26,8 @@ import org.joda.time.field.OffsetDateTimeField;
import org.joda.time.field.ScaledDurationField;
import org.joda.time.format.*;
import java.io.IOException;
import java.io.Writer;
import java.util.Locale;
import java.util.regex.Pattern;
@ -135,9 +137,9 @@ public class Joda {
} else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) {
formatter = ISODateTimeFormat.yearMonthDay();
} else if ("epoch_second".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimeParser(false)).toFormatter();
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(false), new EpochTimeParser(false)).toFormatter();
} else if ("epoch_millis".equals(input)) {
formatter = new DateTimeFormatterBuilder().append(new EpochTimeParser(true)).toFormatter();
formatter = new DateTimeFormatterBuilder().append(new EpochTimePrinter(true), new EpochTimeParser(true)).toFormatter();
} else if (Strings.hasLength(input) && input.contains("||")) {
String[] formats = Strings.delimitedListToStringArray(input, "||");
DateTimeParser[] parsers = new DateTimeParser[formats.length];
@ -200,8 +202,8 @@ public class Joda {
public static class EpochTimeParser implements DateTimeParser {
private static final Pattern MILLI_SECOND_PRECISION_PATTERN = Pattern.compile("^\\d{1,13}$");
private static final Pattern SECOND_PRECISION_PATTERN = Pattern.compile("^\\d{1,10}$");
private static final Pattern MILLI_SECOND_PRECISION_PATTERN = Pattern.compile("^-?\\d{1,13}$");
private static final Pattern SECOND_PRECISION_PATTERN = Pattern.compile("^-?\\d{1,10}$");
private final boolean hasMilliSecondPrecision;
private final Pattern pattern;
@ -218,7 +220,10 @@ public class Joda {
@Override
public int parseInto(DateTimeParserBucket bucket, String text, int position) {
if (text.length() > estimateParsedLength() ||
boolean isPositive = text.startsWith("-") == false;
boolean isTooLong = text.length() > estimateParsedLength();
if ((isPositive && isTooLong) ||
// timestamps have to have UTC timezone
bucket.getZone() != DateTimeZone.UTC ||
pattern.matcher(text).matches() == false) {
@ -242,5 +247,66 @@ public class Joda {
}
return text.length();
}
};
}
public static class EpochTimePrinter implements DateTimePrinter {
private boolean hasMilliSecondPrecision;
public EpochTimePrinter(boolean hasMilliSecondPrecision) {
this.hasMilliSecondPrecision = hasMilliSecondPrecision;
}
@Override
public int estimatePrintedLength() {
return hasMilliSecondPrecision ? 13 : 10;
}
@Override
public void printTo(StringBuffer buf, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) {
if (hasMilliSecondPrecision) {
buf.append(instant);
} else {
buf.append(instant / 1000);
}
}
@Override
public void printTo(Writer out, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) throws IOException {
if (hasMilliSecondPrecision) {
out.write(String.valueOf(instant));
} else {
out.append(String.valueOf(instant / 1000));
}
}
@Override
public void printTo(StringBuffer buf, ReadablePartial partial, Locale locale) {
if (hasMilliSecondPrecision) {
buf.append(String.valueOf(getDateTimeMillis(partial)));
} else {
buf.append(String.valueOf(getDateTimeMillis(partial) / 1000));
}
}
@Override
public void printTo(Writer out, ReadablePartial partial, Locale locale) throws IOException {
if (hasMilliSecondPrecision) {
out.append(String.valueOf(getDateTimeMillis(partial)));
} else {
out.append(String.valueOf(getDateTimeMillis(partial) / 1000));
}
}
private long getDateTimeMillis(ReadablePartial partial) {
int year = partial.get(DateTimeFieldType.year());
int monthOfYear = partial.get(DateTimeFieldType.monthOfYear());
int dayOfMonth = partial.get(DateTimeFieldType.dayOfMonth());
int hourOfDay = partial.get(DateTimeFieldType.hourOfDay());
int minuteOfHour = partial.get(DateTimeFieldType.minuteOfHour());
int secondOfMinute = partial.get(DateTimeFieldType.secondOfMinute());
int millisOfSecond = partial.get(DateTimeFieldType.millisOfSecond());
return partial.getChronology().getDateTimeMillis(year, monthOfYear, dayOfMonth, hourOfDay, minuteOfHour, secondOfMinute, millisOfSecond);
}
}
}


@ -25,11 +25,13 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;
import org.joda.time.MutableDateTime;
import org.joda.time.format.*;
import org.junit.Test;
import java.util.Date;
import java.util.Locale;
import static org.hamcrest.Matchers.*;
@ -250,7 +252,7 @@ public class SimpleJodaTests extends ElasticsearchTestCase {
}
@Test
public void testThatEpochsInSecondsCanBeParsed() {
public void testThatEpochsCanBeParsed() {
boolean parseMilliSeconds = randomBoolean();
// epoch: 1433144433655 => date: Mon Jun 1 09:40:33.655 CEST 2015
@ -271,6 +273,37 @@ public class SimpleJodaTests extends ElasticsearchTestCase {
}
}
@Test
public void testThatNegativeEpochsCanBeParsed() {
// problem: negative epochs can be arbitrary in size...
boolean parseMilliSeconds = randomBoolean();
FormatDateTimeFormatter formatter = Joda.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second");
DateTime dateTime = formatter.parser().parseDateTime("-10000");
assertThat(dateTime.getYear(), is(1969));
assertThat(dateTime.getMonthOfYear(), is(12));
assertThat(dateTime.getDayOfMonth(), is(31));
if (parseMilliSeconds) {
assertThat(dateTime.getHourOfDay(), is(23)); // utc timezone, +2 offset due to CEST
assertThat(dateTime.getMinuteOfHour(), is(59));
assertThat(dateTime.getSecondOfMinute(), is(50));
} else {
assertThat(dateTime.getHourOfDay(), is(21)); // utc timezone, +2 offset due to CEST
assertThat(dateTime.getMinuteOfHour(), is(13));
assertThat(dateTime.getSecondOfMinute(), is(20));
}
// every negative epoch must be parsed, no matter if exact the size or bigger
if (parseMilliSeconds) {
formatter.parser().parseDateTime("-100000000");
formatter.parser().parseDateTime("-999999999999");
formatter.parser().parseDateTime("-1234567890123");
} else {
formatter.parser().parseDateTime("-100000000");
formatter.parser().parseDateTime("-1234567890");
}
}
@Test(expected = IllegalArgumentException.class)
public void testForInvalidDatesInEpochSecond() {
FormatDateTimeFormatter formatter = Joda.forPattern("epoch_second");
@ -283,6 +316,51 @@ public class SimpleJodaTests extends ElasticsearchTestCase {
formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901234"));
}
public void testThatEpochParserIsPrinter() {
FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis");
assertThat(formatter.parser().isPrinter(), is(true));
assertThat(formatter.printer().isPrinter(), is(true));
FormatDateTimeFormatter epochSecondFormatter = Joda.forPattern("epoch_second");
assertThat(epochSecondFormatter.parser().isPrinter(), is(true));
assertThat(epochSecondFormatter.printer().isPrinter(), is(true));
}
public void testThatEpochTimePrinterWorks() {
StringBuffer buffer = new StringBuffer();
LocalDateTime now = LocalDateTime.now();
Joda.EpochTimePrinter epochTimePrinter = new Joda.EpochTimePrinter(false);
epochTimePrinter.printTo(buffer, now, Locale.ROOT);
assertThat(buffer.length(), is(10));
// only check the last digit, as seconds go from 0-99 in the unix timestamp and don't stop at 60
assertThat(buffer.toString(), endsWith(String.valueOf(now.getSecondOfMinute() % 10)));
buffer = new StringBuffer();
Joda.EpochTimePrinter epochMilliSecondTimePrinter = new Joda.EpochTimePrinter(true);
epochMilliSecondTimePrinter.printTo(buffer, now, Locale.ROOT);
assertThat(buffer.length(), is(13));
assertThat(buffer.toString(), endsWith(String.valueOf(now.getMillisOfSecond())));
}
public void testThatEpochParserIsIdempotent() {
FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis");
DateTime dateTime = formatter.parser().parseDateTime("1234567890123");
assertThat(dateTime.getMillis(), is(1234567890123l));
dateTime = formatter.printer().parseDateTime("1234567890456");
assertThat(dateTime.getMillis(), is(1234567890456l));
dateTime = formatter.parser().parseDateTime("1234567890789");
assertThat(dateTime.getMillis(), is(1234567890789l));
FormatDateTimeFormatter secondsFormatter = Joda.forPattern("epoch_second");
DateTime secondsDateTime = secondsFormatter.parser().parseDateTime("1234567890");
assertThat(secondsDateTime.getMillis(), is(1234567890000l));
secondsDateTime = secondsFormatter.printer().parseDateTime("1234567890");
assertThat(secondsDateTime.getMillis(), is(1234567890000l));
secondsDateTime = secondsFormatter.parser().parseDateTime("1234567890");
assertThat(secondsDateTime.getMillis(), is(1234567890000l));
}
private long utcTimeInMillis(String time) {
return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time);
}


@ -33,15 +33,9 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.CommonTermsQueryBuilder.Operator;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder.Type;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.SearchHit;
@ -62,58 +56,11 @@ import java.util.concurrent.ExecutionException;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.andQuery;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.existsQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import static org.elasticsearch.index.query.QueryBuilders.indicesQuery;
import static org.elasticsearch.index.query.QueryBuilders.limitQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.missingQuery;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.regexpQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanMultiTermQueryBuilder;
import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanOrQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery;
import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
import static org.elasticsearch.index.query.QueryBuilders.wrapperQuery;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
@Slow
public class SearchQueryTests extends ElasticsearchIntegrationTest {
@ -2195,11 +2142,10 @@ functionScoreQuery(scriptFunction(new Script("_doc['score'].value")))).setMinSco
}
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/11478")
@Test
public void testDateProvidedAsNumber() throws ExecutionException, InterruptedException {
createIndex("test");
assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource("field", "type=date").get());
assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource("field", "type=date,format=epoch_millis").get());
indexRandom(true, client().prepareIndex("test", "type", "1").setSource("field", -1000000000001L),
client().prepareIndex("test", "type", "2").setSource("field", -1000000000000L),
client().prepareIndex("test", "type", "3").setSource("field", -999999999999L));


@ -200,9 +200,14 @@ year.
year, and two digit day of month.
|`epoch_second`|A formatter for the number of seconds since the epoch.
Note that this timestamp is limited to a maximum of 10 characters, so only
dates between roughly the years 1653 and 2286 can be represented. You should
use a different date formatter in that case.
|`epoch_millis`|A formatter for the number of milliseconds since
the epoch.
|`epoch_millis`|A formatter for the number of milliseconds since the epoch.
Note that this timestamp is limited to a maximum of 13 characters, so only
dates between roughly the years 1653 and 2286 can be represented (see the
sketch after this table). You should use a different date formatter in that case.
|=======================================================================
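A quick sanity check of those 1653/2286 bounds (a sketch using plain Joda-Time; the class name is made up for illustration and is not part of the committed docs):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class EpochBoundsCheck {
    public static void main(String[] args) {
        // the largest/smallest 13-digit millisecond epochs fall in the years 2286 and 1653
        System.out.println(new DateTime(9999999999999L, DateTimeZone.UTC).getYear());   // 2286
        System.out.println(new DateTime(-9999999999999L, DateTimeZone.UTC).getYear());  // 1653
        // the largest/smallest 10-digit second epochs cover the same range
        System.out.println(new DateTime(9999999999L * 1000, DateTimeZone.UTC).getYear());  // 2286
        System.out.println(new DateTime(-9999999999L * 1000, DateTimeZone.UTC).getYear()); // 1653
    }
}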
[float]


@ -301,6 +301,16 @@ Meta fields can no longer be specified within a document. They should be specifi
via the API. For example, instead of adding a field `_parent` within a document,
use the `parent` url parameter when indexing that document.
==== Date format does not support unix timestamps by default
In earlier versions of elasticsearch, every date was first tried to be parsed as a
unix timestamp. This meant that even when specifying a date format like
`dateOptionalTime`, one could supply unix timestamps instead of an ISO8601-formatted
date.
This is no longer supported. If you want to store unix timestamps, you need to specify
the appropriate format in the mapping, namely `epoch_second` or `epoch_millis`, as
sketched below.
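For example, a minimal sketch of such a mapping using the Java client (the wrapper class, the connected `Client` instance, and the index/field names are assumptions for illustration only):

import org.elasticsearch.client.Client;

public class EpochMappingExample {
    static void mapAndIndexEpochMillis(Client client) {
        // map the field explicitly, since unix timestamps are no longer parsed by default
        client.admin().indices().preparePutMapping("test")
                .setType("type")
                .setSource("field", "type=date,format=epoch_millis")
                .get();

        // negative epochs (dates before 1970) are accepted as well
        client.prepareIndex("test", "type", "1").setSource("field", -1000000000001L).get();
    }
}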
==== Source field limitations
The `_source` field could previously be disabled dynamically. Since this field
is a critical piece of many features like the Update API, it is no longer