The `epoch_millis` and `epoch_second` date formats now truncate float values, whether they are supplied as numbers or as strings. The `coerce` parameter is not defined for the `date` field type, and this is not changing. See PR #26119. Closes #14641
This commit is contained in:
parent 10c3c1aef0
commit 3e3132fe3f
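For orientation, a minimal standalone sketch of the truncation semantics the Joda change below relies on. This is plain Java, not code from the commit; the class name and values are illustrative. BigDecimal.longValue() discards any fractional part, and epoch_second values are then scaled to milliseconds.

import java.math.BigDecimal;

public class EpochTruncationSketch {
    public static void main(String[] args) {
        // epoch_millis-style input: the fraction after the dot is discarded, not rejected
        long millis = new BigDecimal("1234567890123.9999").longValue();
        System.out.println(millis); // 1234567890123

        // epoch_second-style input: truncate first, then scale to milliseconds (factor 1000)
        long seconds = new BigDecimal("1234567890.9999").longValue();
        System.out.println(seconds * 1000L); // 1234567890000

        // negative values truncate toward zero as well
        System.out.println(new BigDecimal("-1234567890123.9999").longValue()); // -1234567890123
    }
}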
@@ -42,6 +42,7 @@ import org.joda.time.format.StrictISODateTimeFormat;
 
 import java.io.IOException;
 import java.io.Writer;
+import java.math.BigDecimal;
 import java.util.Locale;
 
 public class Joda {
@@ -331,7 +332,8 @@ public class Joda {
 @Override
 public int parseInto(DateTimeParserBucket bucket, String text, int position) {
 boolean isPositive = text.startsWith("-") == false;
-boolean isTooLong = text.length() > estimateParsedLength();
+int firstDotIndex = text.indexOf('.');
+boolean isTooLong = (firstDotIndex == -1 ? text.length() : firstDotIndex) > estimateParsedLength();
 
 if (bucket.getZone() != DateTimeZone.UTC) {
 String format = hasMilliSecondPrecision ? "epoch_millis" : "epoch_second";
@@ -342,7 +344,7 @@
 
 int factor = hasMilliSecondPrecision ? 1 : 1000;
 try {
-long millis = Long.valueOf(text) * factor;
+long millis = new BigDecimal(text).longValue() * factor;
 DateTime dt = new DateTime(millis, DateTimeZone.UTC);
 bucket.saveField(DateTimeFieldType.year(), dt.getYear());
 bucket.saveField(DateTimeFieldType.monthOfYear(), dt.getMonthOfYear());
@@ -260,6 +260,10 @@ public class SimpleJodaTests extends ESTestCase {
 } else {
 assertThat(dateTime.getMillisOfSecond(), is(0));
 }
+
+// test floats get truncated
+String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L), randomNonNegativeLong());
+assertThat(formatter.parser().parseDateTime(epochFloatValue).getMillis(), is(dateTime.getMillis()));
 }
 
 public void testThatNegativeEpochsCanBeParsed() {
@@ -281,16 +285,26 @@ public class SimpleJodaTests extends ESTestCase {
 assertThat(dateTime.getSecondOfMinute(), is(20));
 }
+
+// test floats get truncated
+String epochFloatValue = String.format(Locale.US, "%d.%d", dateTime.getMillis() / (parseMilliSeconds ? 1L : 1000L), randomNonNegativeLong());
+assertThat(formatter.parser().parseDateTime(epochFloatValue).getMillis(), is(dateTime.getMillis()));
 
 // every negative epoch must be parsed, no matter if exact the size or bigger
 if (parseMilliSeconds) {
 formatter.parser().parseDateTime("-100000000");
 formatter.parser().parseDateTime("-999999999999");
 formatter.parser().parseDateTime("-1234567890123");
 formatter.parser().parseDateTime("-1234567890123456789");
+
+formatter.parser().parseDateTime("-1234567890123.9999");
+formatter.parser().parseDateTime("-1234567890123456789.9999");
 } else {
 formatter.parser().parseDateTime("-100000000");
 formatter.parser().parseDateTime("-1234567890");
 formatter.parser().parseDateTime("-1234567890123456");
+
+formatter.parser().parseDateTime("-1234567890.9999");
+formatter.parser().parseDateTime("-1234567890123456.9999");
 }
 }
 
@@ -35,6 +35,7 @@ import org.junit.Before;
 
 import java.io.IOException;
 import java.util.Collection;
+import java.util.Locale;
 
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.notNullValue;
@@ -214,6 +215,32 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
 assertEquals(1457654400000L, pointField.numericValue().longValue());
 }
+
+public void testFloatEpochFormat() throws IOException {
+String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("field").field("type", "date")
+.field("format", "epoch_millis").endObject().endObject()
+.endObject().endObject().string();
+
+DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+assertEquals(mapping, mapper.mappingSource().toString());
+
+double epochFloatMillisFromEpoch = (randomDouble() * 2 - 1) * 1000000;
+String epochFloatValue = String.format(Locale.US, "%f", epochFloatMillisFromEpoch);
+
+ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
+.startObject()
+.field("field", epochFloatValue)
+.endObject()
+.bytes(),
+XContentType.JSON));
+
+IndexableField[] fields = doc.rootDoc().getFields("field");
+assertEquals(2, fields.length);
+IndexableField pointField = fields[0];
+assertEquals((long)epochFloatMillisFromEpoch, pointField.numericValue().longValue());
+}
 
 public void testChangeLocale() throws IOException {
 String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties").startObject("field").field("type", "date").field("locale", "fr").endObject().endObject()
@@ -245,24 +245,24 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
 IndexableField pointField = fields[1];
 assertEquals(2, pointField.fieldType().pointDimensionCount());
 
-mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject()
-.endObject().endObject();
-DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping.string()));
-
-assertEquals(mapping.string(), mapper2.mappingSource().toString());
-
-ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
-.startObject()
-.startObject("field")
-.field(getFromField(), "5.2")
-.field(getToField(), "10")
-.endObject()
-.endObject().bytes(),
-XContentType.JSON));
-MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
-assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"),
-containsString("failed to parse date"), containsString("is not an IP string literal")));
+// date_range ignores the coerce parameter and epoch_millis date format truncates floats (see issue: #14641)
+if (type.equals("date_range") == false) {
+
+mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field")
+.field("type", type).field("coerce", false).endObject().endObject().endObject().endObject();
+DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping.string()));
+
+assertEquals(mapping.string(), mapper2.mappingSource().toString());
+
+ThrowingRunnable runnable = () -> mapper2
+.parse(SourceToParse.source(
+"test", "type", "1", XContentFactory.jsonBuilder().startObject().startObject("field")
+.field(getFromField(), "5.2").field(getToField(), "10").endObject().endObject().bytes(),
+XContentType.JSON));
+MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
+assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date"),
+containsString("is not an IP string literal")));
+}
 }
 
 @Override
@@ -1014,17 +1014,20 @@ public class DateRangeIT extends ESIntegTestCase {
 assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
 assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
 
-// however, e-notation should and fractional parts provided as string
-// should be parsed and error if not compatible
-Exception e = expectThrows(Exception.class, () -> client().prepareSearch(indexName).setSize(0)
-.addAggregation(dateRange("date_range").field("date").addRange("1.0e3", "3.0e3").addRange("3.0e3", "4.0e3")).get());
-assertThat(e.getCause(), instanceOf(ElasticsearchParseException.class));
-assertEquals("failed to parse date field [1.0e3] with format [epoch_second]", e.getCause().getMessage());
+// also e-notation and floats provided as string also be truncated (see: #14641)
+searchResponse = client().prepareSearch(indexName).setSize(0)
+.addAggregation(dateRange("date_range").field("date").addRange("1.0e3", "3.0e3").addRange("3.0e3", "4.0e3")).get();
+assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
+buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
+assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
+assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
 
-e = expectThrows(Exception.class, () -> client().prepareSearch(indexName).setSize(0)
-.addAggregation(dateRange("date_range").field("date").addRange("1000.123", "3000.8").addRange("3000.8", "4000.3")).get());
-assertThat(e.getCause(), instanceOf(ElasticsearchParseException.class));
-assertEquals("failed to parse date field [1000.123] with format [epoch_second]", e.getCause().getMessage());
+searchResponse = client().prepareSearch(indexName).setSize(0)
+.addAggregation(dateRange("date_range").field("date").addRange("1000.123", "3000.8").addRange("3000.8", "4000.3")).get();
+assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
+buckets = checkBuckets(searchResponse.getAggregations().get("date_range"), "date_range", 2);
+assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
+assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
 
 // using different format should work when to/from is compatible with
 // format in aggregation
@@ -1699,11 +1699,15 @@ public class SearchQueryIT extends ESIntegTestCase {
 assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource("field", "type=date,format=epoch_millis").get());
 indexRandom(true, client().prepareIndex("test", "type", "1").setSource("field", -1000000000001L),
 client().prepareIndex("test", "type", "2").setSource("field", -1000000000000L),
-client().prepareIndex("test", "type", "3").setSource("field", -999999999999L));
+client().prepareIndex("test", "type", "3").setSource("field", -999999999999L),
+client().prepareIndex("test", "type", "4").setSource("field", -1000000000001.0123456789),
+client().prepareIndex("test", "type", "5").setSource("field", -1000000000000.0123456789),
+client().prepareIndex("test", "type", "6").setSource("field", -999999999999.0123456789));
+
 
-assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-1000000000000L)).get(), 2);
-assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-999999999999L)).get(), 3);
+assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-1000000000000L)).get(), 4);
+assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").lte(-999999999999L)).get(), 6);
 
 }
 
 public void testRangeQueryWithTimeZone() throws Exception {