Currently the rounding used in range queries can behave differently for `date` and `date_range` fields, as explained in #50009. The behaviour on `date` fields is the one documented in https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-range-query.html#range-query-date-math-rounding. This change adapts the rounding behaviour of RangeType.DATE so that range queries on `date_range` fields use the same logic as `date` fields. Backport of #50237
parent e372854d43
commit 86f3b47299
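The behaviour change is easiest to see with a concrete boundary. As a minimal illustration (plain java.time, not Elasticsearch code), a day-granularity bound such as `2019-12-15` or `2019-12-15||/d` resolves to the first millisecond of the day for `gte`/`lt`, and to the last millisecond of the day for `gt`/`lte` — the rounding that `date` fields already applied and that `date_range` fields now apply as well:

import java.time.LocalDate;
import java.time.ZoneOffset;

// Sketch only: shows the millisecond boundaries that day-granularity bounds resolve to.
public class DateMathRoundingSketch {
    public static void main(String[] args) {
        LocalDate day = LocalDate.parse("2019-12-15");

        // gte / lt resolve to the first millisecond of the day
        long startOfDay = day.atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        // gt / lte resolve to the last millisecond of the day ("round up")
        long endOfDay = day.plusDays(1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli() - 1;

        System.out.println(startOfDay); // 1576368000000 -> 2019-12-15T00:00:00.000Z
        System.out.println(endOfDay);   // 1576454399999 -> 2019-12-15T23:59:59.999Z
    }
}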
@@ -50,3 +50,18 @@ will fail to start.
 The `order` config of authentication realms must be unique in version 8.0.0.
 If you configure more than one realm of any type with the same order, the node will fail to start.
 
+[discrete]
+[[breaking_77_search_changes]]
+=== Search changes
+
+[discrete]
+==== Consistent rounding of range queries on `date_range` fields
+`range` queries on `date_range` fields can currently have slightly different
+boundaries than the equivalent query on a pure `date` field. This can happen,
+for example, when using date math or dates that don't specify up to the last
+millisecond. While queries on a `date` field round up to the latest millisecond
+for `gt` and `lte` boundaries, the same queries on `date_range` fields didn't
+do this. The behavior is now the same for both field types, as documented in
+<<range-query-date-math-rounding>>.
@@ -391,3 +391,59 @@ setup:
         body: { "size" : 0, "query" : { "range" : { "date_range" : { "gte": "2017-09-03", "lte" : "2017-09-04", "relation": "within" } } } }
 
   - match: { hits.total: 0 }
+
+---
+"Date range rounding":
+  - skip:
+      version: " - 7.6.99"
+      reason: "This part tests rounding behaviour changed in 7.7"
+
+  - do:
+      index:
+        index: test
+        id: 1
+        body: { "date_range" : { "gte": "2019-12-14T12:00:00.000Z", "lte": "2019-12-14T13:00:00.000Z" } }
+
+  - do:
+      index:
+        index: test
+        id: 2
+        body: { "date_range" : { "gte": "2019-12-15T12:00:00.000Z", "lte": "2019-12-15T13:00:00.000Z" } }
+
+  - do:
+      index:
+        index: test
+        id: 3
+        body: { "date_range" : { "gte": "2019-12-16T12:00:00.000Z", "lte": "2019-12-16T13:00:00.000Z" } }
+
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body: { "size" : 0, "query" : { "range" : { "date_range" : { "gt": "2019-12-15||/d", "relation": "within" } } } }
+
+  - match: { hits.total: 1 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body: { "size" : 0, "query" : { "range" : { "date_range" : { "gte": "2019-12-15||/d", "relation": "within" } } } }
+
+  - match: { hits.total: 2 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body: { "size" : 0, "query" : { "range" : { "date_range" : { "lt": "2019-12-15||/d", "relation": "within" } } } }
+
+  - match: { hits.total: 1 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        body: { "size" : 0, "query" : { "range" : { "date_range" : { "lte": "2019-12-15||/d", "relation": "within" } } } }
+
+  - match: { hits.total: 2 }
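For the record, the expected hit counts above follow directly from the rounded boundaries of `2019-12-15||/d` in UTC. A small, purely illustrative sketch of that reasoning (not part of the commit):

import java.time.Instant;

// Illustration only: the effective boundaries behind the expected hit counts above.
public class RangeBoundsSketch {
    public static void main(String[] args) {
        Instant startOfDay = Instant.parse("2019-12-15T00:00:00.000Z"); // used by gte / lt
        Instant endOfDay = Instant.parse("2019-12-15T23:59:59.999Z");   // used by gt / lte

        // doc 1 lies within 2019-12-14, doc 2 within 2019-12-15, doc 3 within 2019-12-16:
        // gt  -> strictly after endOfDay    -> only doc 3   (hits.total: 1)
        // gte -> at or after startOfDay     -> docs 2 and 3 (hits.total: 2)
        // lt  -> strictly before startOfDay -> only doc 1   (hits.total: 1)
        // lte -> at or before endOfDay      -> docs 1 and 2 (hits.total: 2)
        System.out.println(startOfDay + " .. " + endOfDay);
    }
}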
@@ -63,6 +63,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
+import java.util.function.LongSupplier;
 
 import static org.elasticsearch.common.time.DateUtils.toLong;
 
@@ -371,7 +372,7 @@ public final class DateFieldMapper extends FieldMapper {
             if (lowerTerm == null) {
                 l = Long.MIN_VALUE;
             } else {
-                l = parseToLong(lowerTerm, !includeLower, timeZone, parser, context);
+                l = parseToLong(lowerTerm, !includeLower, timeZone, parser, context::nowInMillis);
                 if (includeLower == false) {
                     ++l;
                 }
@@ -379,7 +380,7 @@ public final class DateFieldMapper extends FieldMapper {
             if (upperTerm == null) {
                 u = Long.MAX_VALUE;
             } else {
-                u = parseToLong(upperTerm, includeUpper, timeZone, parser, context);
+                u = parseToLong(upperTerm, includeUpper, timeZone, parser, context::nowInMillis);
                 if (includeUpper == false) {
                     --u;
                 }
@@ -393,7 +394,7 @@ public final class DateFieldMapper extends FieldMapper {
         }
 
         public long parseToLong(Object value, boolean roundUp,
-                @Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
+                @Nullable ZoneId zone, @Nullable DateMathParser forcedDateParser, LongSupplier now) {
             DateMathParser dateParser = dateMathParser();
             if (forcedDateParser != null) {
                 dateParser = forcedDateParser;
@@ -405,7 +406,7 @@ public final class DateFieldMapper extends FieldMapper {
             } else {
                 strValue = value.toString();
             }
-            Instant instant = dateParser.parse(strValue, context::nowInMillis, roundUp, zone);
+            Instant instant = dateParser.parse(strValue, now, roundUp, zone);
             return resolution.convert(instant);
         }
 
@@ -419,7 +420,7 @@ public final class DateFieldMapper extends FieldMapper {
 
             long fromInclusive = Long.MIN_VALUE;
             if (from != null) {
-                fromInclusive = parseToLong(from, !includeLower, timeZone, dateParser, context);
+                fromInclusive = parseToLong(from, !includeLower, timeZone, dateParser, context::nowInMillis);
                 if (includeLower == false) {
                     if (fromInclusive == Long.MAX_VALUE) {
                         return Relation.DISJOINT;
@@ -430,7 +431,7 @@ public final class DateFieldMapper extends FieldMapper {
 
             long toInclusive = Long.MAX_VALUE;
             if (to != null) {
-                toInclusive = parseToLong(to, includeUpper, timeZone, dateParser, context);
+                toInclusive = parseToLong(to, includeUpper, timeZone, dateParser, context::nowInMillis);
                 if (includeUpper == false) {
                     if (toInclusive == Long.MIN_VALUE) {
                         return Relation.DISJOINT;
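The `parseToLong` change above is a small decoupling: instead of the whole `QueryRewriteContext`, the method now only takes a `LongSupplier` for "now", so production code passes `context::nowInMillis` while tests can pass a fixed clock such as `() -> 0` (both forms appear elsewhere in this diff). A generic sketch of that pattern, with illustrative names that are not the actual Elasticsearch classes:

import java.util.function.LongSupplier;

// Illustrative only: a parser that needs a clock, not a whole query context.
class NowBasedParser {
    long parse(String value, LongSupplier now) {
        // "now" is only resolved when the expression actually needs it
        return "now".equals(value) ? now.getAsLong() : Long.parseLong(value);
    }
}

class Callers {
    long inProduction(NowBasedParser parser, long contextNowInMillis) {
        return parser.parse("now", () -> contextNowInMillis); // e.g. context::nowInMillis
    }

    long inTests(NowBasedParser parser) {
        return parser.parse("now", () -> 0L); // a fixed clock keeps tests deterministic
    }
}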
@@ -232,12 +232,14 @@ public enum RangeType {
 
             DateMathParser dateMathParser = (parser == null) ?
                 DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser;
+            boolean roundUp = includeLower == false; // using "gt" should round lower bound up
             Long low = lowerTerm == null ? Long.MIN_VALUE :
                 dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(),
-                    context::nowInMillis, false, zone).toEpochMilli();
+                    context::nowInMillis, roundUp, zone).toEpochMilli();
+            roundUp = includeUpper; // using "lte" should round upper bound up
             Long high = upperTerm == null ? Long.MAX_VALUE :
                 dateMathParser.parse(upperTerm instanceof BytesRef ? ((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(),
-                    context::nowInMillis, false, zone).toEpochMilli();
+                    context::nowInMillis, roundUp, zone).toEpochMilli();
 
             return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone,
                 dateMathParser, context);
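The two `roundUp` flags above are the heart of the fix: the lower bound is rounded up exactly when it is exclusive (`gt`), and the upper bound exactly when it is inclusive (`lte`), matching what `DateFieldType` already does. A hedged sketch of the effect, reusing only calls that appear in this diff (`DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser()` and `DateMathParser.parse(text, now, roundUp, zone)`) and assuming the 7.x server artifact and package locations on the classpath:

import java.time.ZoneOffset;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.index.mapper.DateFieldMapper;

// Sketch only, not part of the commit.
public class RangeTypeRoundingSketch {
    public static void main(String[] args) {
        DateMathParser parser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();

        boolean includeLower = false;            // a "gt" bound
        boolean roundUp = includeLower == false; // gt rounds the lower bound up
        long low = parser.parse("2019-12-15||/d", () -> 0, roundUp, ZoneOffset.UTC).toEpochMilli();
        System.out.println(low);  // 1576454399999 = 2019-12-15T23:59:59.999Z, then excluded by the caller

        boolean includeUpper = true;             // an "lte" bound
        roundUp = includeUpper;                  // lte rounds the upper bound up
        long high = parser.parse("2019-12-15||/d", () -> 0, roundUp, ZoneOffset.UTC).toEpochMilli();
        System.out.println(high); // same instant, so "lte 2019-12-15||/d" covers the whole day
    }
}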
@@ -30,16 +30,15 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.unit.DistanceUnit;
+import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.unit.TimeValue;
-
 import org.elasticsearch.index.mapper.DateFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
 import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
+import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
+import org.elasticsearch.index.mapper.MappedFieldType;
 
 import java.io.IOException;
 import java.util.Objects;
@@ -122,7 +121,7 @@ public class DistanceFeatureQueryBuilder extends AbstractQueryBuilder<DistanceFe
         }
         Object originObj = origin.origin();
         if (fieldType instanceof DateFieldType) {
-            long originLong = ((DateFieldType) fieldType).parseToLong(originObj, true, null, null, context);
+            long originLong = ((DateFieldType) fieldType).parseToLong(originObj, true, null, null, context::nowInMillis);
             TimeValue pivotVal = TimeValue.parseTimeValue(pivot, DistanceFeatureQueryBuilder.class.getSimpleName() + ".pivot");
             if (((DateFieldType) fieldType).resolution() == DateFieldMapper.Resolution.MILLISECONDS) {
                 return LongPoint.newDistanceFeatureQuery(field, boost, originLong, pivotVal.getMillis());
@@ -213,7 +212,9 @@ public class DistanceFeatureQueryBuilder extends AbstractQueryBuilder<DistanceFe
 
         @Override
         public final boolean equals(Object other) {
-            if ((other instanceof Origin) == false) return false;
+            if ((other instanceof Origin) == false) {
+                return false;
+            }
             Object otherOrigin = ((Origin) other).origin();
             return this.origin().equals(otherOrigin);
         }
@@ -318,7 +318,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
         if (originString == null) {
             origin = context.nowInMillis();
         } else {
-            origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToLong(originString, false, null, null, context);
+            origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToLong(originString, false, null, null, context::nowInMillis);
         }
 
         if (scaleString == null) {
@@ -23,7 +23,9 @@ import org.apache.lucene.document.DoubleRange;
 import org.apache.lucene.document.FloatRange;
 import org.apache.lucene.document.InetAddressRange;
 import org.apache.lucene.document.IntRange;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.LongRange;
+import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.queries.BinaryDocValuesRangeQuery;
 import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.PointRangeQuery;
@@ -102,14 +104,29 @@ public class RangeFieldQueryStringQueryBuilderTests extends AbstractQueryTestCas
         RangeFieldMapper.RangeFieldType type = (RangeFieldMapper.RangeFieldType) context.fieldMapper(DATE_RANGE_FIELD_NAME);
         DateMathParser parser = type.dateMathParser;
         Query query = new QueryStringQueryBuilder(DATE_RANGE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(createShardContext());
+        String lowerBoundExact = "2010-01-01T00:00:00.000";
+        String upperBoundExact = "2018-01-01T23:59:59.999";
         Query range = LongRange.newIntersectsQuery(DATE_RANGE_FIELD_NAME,
-            new long[]{ parser.parse("2010-01-01", () -> 0).toEpochMilli()},
-            new long[]{ parser.parse("2018-01-01", () -> 0).toEpochMilli()});
+            new long[]{ parser.parse(lowerBoundExact, () -> 0).toEpochMilli()},
+            new long[]{ parser.parse(upperBoundExact, () -> 0).toEpochMilli()});
         Query dv = RangeType.DATE.dvRangeQuery(DATE_RANGE_FIELD_NAME,
             BinaryDocValuesRangeQuery.QueryType.INTERSECTS,
-            parser.parse("2010-01-01", () -> 0).toEpochMilli(),
-            parser.parse("2018-01-01", () -> 0).toEpochMilli(), true, true);
+            parser.parse(lowerBoundExact, () -> 0).toEpochMilli(),
+            parser.parse(upperBoundExact, () -> 0).toEpochMilli(), true, true);
         assertEquals(new IndexOrDocValuesQuery(range, dv), query);
+
+        // also make sure the produced bounds are the same as on a regular `date` field
+        DateFieldMapper.DateFieldType dateType = (DateFieldMapper.DateFieldType) context.fieldMapper(DATE_FIELD_NAME);
+        parser = dateType.dateMathParser;
+        Query queryOnDateField = new QueryStringQueryBuilder(DATE_FIELD_NAME + ":[2010-01-01 TO 2018-01-01]").toQuery(createShardContext());
+        Query controlQuery = LongPoint.newRangeQuery(DATE_FIELD_NAME,
+            new long[]{ parser.parse(lowerBoundExact, () -> 0).toEpochMilli()},
+            new long[]{ parser.parse(upperBoundExact, () -> 0).toEpochMilli()});
+
+        Query controlDv = SortedNumericDocValuesField.newSlowRangeQuery(DATE_FIELD_NAME,
+            parser.parse(lowerBoundExact, () -> 0).toEpochMilli(),
+            parser.parse(upperBoundExact, () -> 0).toEpochMilli());
+        assertEquals(new IndexOrDocValuesQuery(controlQuery, controlDv), queryOnDateField);
     }
 
     public void testIPRangeQuery() throws Exception {
@@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
 import org.elasticsearch.index.mapper.RangeFieldMapper.RangeFieldType;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.test.IndexSettingsModule;
@@ -162,7 +163,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
         assertThat(rangeQuery, instanceOf(IndexOrDocValuesQuery.class));
         assertThat(((IndexOrDocValuesQuery) rangeQuery).getIndexQuery(), instanceOf(MatchNoDocsQuery.class));
     }
 
     /**
      * check that we catch cases where the user specifies larger "from" than "to" value, not counting the include upper/lower settings
      */
@@ -231,14 +232,15 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
         return new QueryShardContext(0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null,
             xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null);
     }
 
     public void testDateRangeQueryUsingMappingFormat() {
         QueryShardContext context = createContext();
         RangeFieldType fieldType = new RangeFieldType(RangeType.DATE);
         fieldType.setName(FIELDNAME);
         fieldType.setIndexOptions(IndexOptions.DOCS);
         fieldType.setHasDocValues(false);
-        ShapeRelation relation = randomFrom(ShapeRelation.values());
+        // don't use DISJOINT here because it doesn't work on date fields which we want to compare bounds with
+        ShapeRelation relation = randomValueOtherThan(ShapeRelation.DISJOINT, () -> randomFrom(ShapeRelation.values()));
 
         // dates will break the default format, month/day of month is turned around in the format
         final String from = "2016-15-06T15:29:50+08:00";
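A note on the changed expectation in the next hunk, purely as an editorial illustration: the lower bound stays at 1465975790000 (truncated to the first millisecond of its second, as before), while the inclusive upper bound now gains the trailing 999 ms because a date that doesn't specify milliseconds is rounded up to the end of its second:

// Illustration only: relating the expected values in the assertion below.
public class MappingFormatBoundsCheck {
    public static void main(String[] args) {
        long lower = 1465975790000L;    // "from", truncated to .000 of its second
        long oldUpper = 1466062190000L; // old expectation: also truncated to .000
        long newUpper = 1466062190999L; // new expectation: rounded up to .999

        System.out.println(oldUpper - lower);    // 86400000 ms = exactly 24 hours
        System.out.println(newUpper - oldUpper); // 999 ms: the whole final second is now included
    }
}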
@@ -257,7 +259,61 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
 
         fieldType.setDateTimeFormatter(formatter);
         final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, null, context);
-        assertEquals("field:<ranges:[1465975790000 : 1466062190000]>", query.toString());
+        assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", query.toString());
+
+        // compare lower and upper bounds with what we would get on a `date` field
+        DateFieldType dateFieldType = new DateFieldType();
+        dateFieldType.setName(FIELDNAME);
+        dateFieldType.setDateTimeFormatter(formatter);
+        final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, null, context);
+        assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString());
     }
 
+    /**
+     * We would like to ensure lower and upper bounds are consistent between queries on a `date` and a `date_range`
+     * field, so we randomize a few cases and compare the generated queries here
+     */
+    public void testDateVsDateRangeBounds() {
+        QueryShardContext context = createContext();
+        RangeFieldType fieldType = new RangeFieldType(RangeType.DATE);
+        fieldType.setName(FIELDNAME);
+        fieldType.setIndexOptions(IndexOptions.DOCS);
+        fieldType.setHasDocValues(false);
+
+        // date formatter that truncates seconds, so we get some rounding behavior
+        final DateFormatter formatter = DateFormatter.forPattern("yyyy-dd-MM'T'HH:mm");
+        long lower = randomLongBetween(formatter.parseMillis("2000-01-01T00:00"), formatter.parseMillis("2010-01-01T00:00"));
+        long upper = randomLongBetween(formatter.parseMillis("2011-01-01T00:00"), formatter.parseMillis("2020-01-01T00:00"));
+
+        fieldType.setDateTimeFormatter(formatter);
+        String lowerAsString = formatter.formatMillis(lower);
+        String upperAsString = formatter.formatMillis(upper);
+        // also add date math rounding to days occasionally
+        if (randomBoolean()) {
+            lowerAsString = lowerAsString + "||/d";
+        }
+        if (randomBoolean()) {
+            upperAsString = upperAsString + "||/d";
+        }
+        boolean includeLower = randomBoolean();
+        boolean includeUpper = randomBoolean();
+        final Query query = fieldType.rangeQuery(lowerAsString, upperAsString, includeLower, includeUpper, ShapeRelation.INTERSECTS, null,
+            null, context);
+
+        // get exact lower and upper bounds similar to what we would parse for `date` fields for the same input strings
+        DateFieldType dateFieldType = new DateFieldType();
+        long lowerBoundLong = dateFieldType.parseToLong(lowerAsString, !includeLower, null, formatter.toDateMathParser(), () -> 0);
+        if (includeLower == false) {
+            ++lowerBoundLong;
+        }
+        long upperBoundLong = dateFieldType.parseToLong(upperAsString, includeUpper, null, formatter.toDateMathParser(), () -> 0);
+        if (includeUpper == false) {
+            --upperBoundLong;
+        }
+
+        // check that using these bounds we get a similar query when constructing the equivalent query on the date_range field
+        Query range = LongRange.newIntersectsQuery(FIELDNAME, new long[] { lowerBoundLong }, new long[] { upperBoundLong });
+        assertEquals(range, query);
+    }
+
     private Query getExpectedRangeQuery(ShapeRelation relation, Object from, Object to, boolean includeLower, boolean includeUpper) {
@@ -28,12 +28,11 @@ import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.DistanceFeatureQueryBuilder.Origin;
 import org.elasticsearch.test.AbstractQueryTestCase;
 import org.joda.time.DateTime;
-import org.elasticsearch.index.query.DistanceFeatureQueryBuilder.Origin;
-import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
-
 
 import java.io.IOException;
 import java.time.Instant;
@@ -88,7 +87,7 @@ public class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<Dist
         } else { // if (fieldName.equals(DATE_FIELD_NAME))
             MapperService mapperService = context.getMapperService();
             DateFieldType fieldType = (DateFieldType) mapperService.fullName(fieldName);
-            long originLong = fieldType.parseToLong(origin, true, null, null, context);
+            long originLong = fieldType.parseToLong(origin, true, null, null, context::nowInMillis);
             TimeValue pivotVal = TimeValue.parseTimeValue(pivot, DistanceFeatureQueryBuilder.class.getSimpleName() + ".pivot");
             long pivotLong;
             if (fieldType.resolution() == DateFieldMapper.Resolution.MILLISECONDS) {
@@ -173,12 +173,12 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
                 ((DateFieldMapper.DateFieldType) mappedFieldType).parseToLong(queryBuilder.from(),
                     queryBuilder.includeLower(),
                     queryBuilder.getDateTimeZone(),
-                    queryBuilder.getForceDateParser(), context);
+                    queryBuilder.getForceDateParser(), context::nowInMillis);
             toInMillis = queryBuilder.to() == null ? null :
                 ((DateFieldMapper.DateFieldType) mappedFieldType).parseToLong(queryBuilder.to(),
                     queryBuilder.includeUpper(),
                     queryBuilder.getDateTimeZone(),
-                    queryBuilder.getForceDateParser(), context);
+                    queryBuilder.getForceDateParser(), context::nowInMillis);
         } else {
             fromInMillis = toInMillis = null;
             fail("unexpected mapped field type: [" + mappedFieldType.getClass() + "] " + mappedFieldType.toString());