Currently the `time_zone` parameter in `query_string` queries gets applied correctly only when using the range syntax, e.g. `date:[2020-01-02 TO 2020-01-05]`. When a date field gets searched without explicit range syntax, e.g. `date:"2020-01-01"`, we internally create a range query that uses the specified date as the start date and rounds up to the next underspecified unit for the end date (e.g. here 2020-01-01T23:59:59) without considering the `time_zone` setting. This change adds a check in QueryStringQueryParser to detect this scenario early, where we have access to the time zone information, and directly create a range query using it. Closes #55813
This commit is contained in:
parent
cc119c3853
commit
73b64908b2
|
@ -48,6 +48,7 @@ import org.elasticsearch.common.regex.Regex;
|
|||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.core.internal.io.IOUtils;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
|
@ -323,6 +324,10 @@ public class QueryStringQueryParser extends XQueryParser {
|
|||
}
|
||||
return getRangeQuery(field, null, queryText.substring(1), true, false);
|
||||
}
|
||||
// if we are querying a single date field, we also create a range query that leverages the time zone setting
|
||||
if (context.fieldMapper(field) instanceof DateFieldType && this.timeZone != null) {
|
||||
return getRangeQuery(field, queryText, queryText, true, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -20,6 +20,8 @@
|
|||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.analysis.MockSynonymAnalyzer;
|
||||
import org.apache.lucene.document.LongPoint;
|
||||
import org.apache.lucene.document.SortedNumericDocValuesField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.queries.BlendedTermQuery;
|
||||
import org.apache.lucene.search.AutomatonQuery;
|
||||
|
@ -30,6 +32,7 @@ import org.apache.lucene.search.BoostQuery;
|
|||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.DisjunctionMaxQuery;
|
||||
import org.apache.lucene.search.FuzzyQuery;
|
||||
import org.apache.lucene.search.IndexOrDocValuesQuery;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
|
@ -800,6 +803,26 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testToQueryDateWithTimeZone() throws Exception {
|
||||
QueryStringQueryBuilder qsq = queryStringQuery(DATE_FIELD_NAME + ":1970-01-01");
|
||||
QueryShardContext context = createShardContext();
|
||||
Query query = qsq.toQuery(context);
|
||||
assertThat(query, instanceOf(IndexOrDocValuesQuery.class));
|
||||
long lower = 0; // 1970-01-01T00:00:00.999 UTC
|
||||
long upper = 86399999; // 1970-01-01T23:59:59.999 UTC
|
||||
assertEquals(calculateExpectedDateQuery(lower, upper), query);
|
||||
int msPerHour = 3600000;
|
||||
assertEquals(calculateExpectedDateQuery(lower - msPerHour, upper - msPerHour), qsq.timeZone("+01:00").toQuery(context));
|
||||
assertEquals(calculateExpectedDateQuery(lower + msPerHour, upper + msPerHour), qsq.timeZone("-01:00").toQuery(context));
|
||||
}
|
||||
|
||||
private IndexOrDocValuesQuery calculateExpectedDateQuery(long lower, long upper) {
|
||||
Query query = LongPoint.newRangeQuery(DATE_FIELD_NAME, lower, upper);
|
||||
Query dv = SortedNumericDocValuesField.newSlowRangeQuery(DATE_FIELD_NAME, lower, upper);
|
||||
return new IndexOrDocValuesQuery(query, dv);
|
||||
}
|
||||
|
||||
public void testFuzzyNumeric() throws Exception {
|
||||
QueryStringQueryBuilder query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME);
|
||||
QueryShardContext context = createShardContext();
|
||||
|
|
Loading…
Reference in New Issue