Removes the now callable in the date math parser in favour of a LongSupplier (#20796)
Previous to this change the DateMathParser accepted a Callable<Long> to use for accessing the now value. The implementations of this callable would fall back on System.currentTimeMillis() if there was no context object provided. This is no longer necessary for two reasons: (1) We should not fall back to System.currentTimeMillis(), as a context should always be provided. This ensures consistency between shards for the now value in all cases. (2) We should use a LongSupplier rather than requiring an implementation of Callable. This means that we can just pass in context::nowInMillis for this parameter and not have to implement anything.
This commit is contained in:
parent
0a1b8a3176
commit
eba60ee63c
|
@ -48,7 +48,6 @@ import java.util.List;
|
|||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class IndexNameExpressionResolver extends AbstractComponent {
|
||||
|
@ -848,12 +847,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
|
|||
DateTimeFormatter parser = dateFormatter.withZone(timeZone);
|
||||
FormatDateTimeFormatter formatter = new FormatDateTimeFormatter(dateFormatterPattern, parser, Locale.ROOT);
|
||||
DateMathParser dateMathParser = new DateMathParser(formatter);
|
||||
long millis = dateMathParser.parse(mathExpression, new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return context.getStartTime();
|
||||
}
|
||||
}, false, timeZone);
|
||||
long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone);
|
||||
|
||||
String time = formatter.printer().print(millis);
|
||||
beforePlaceHolderSb.append(time);
|
||||
|
|
|
@ -25,11 +25,11 @@ import org.joda.time.MutableDateTime;
|
|||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
/**
|
||||
* A parser for date/time formatted text with optional date math.
|
||||
*
|
||||
*
|
||||
* The format of the datetime is configurable, and unix timestamps can also be used. Datemath
|
||||
* is appended to a datetime with the following syntax:
|
||||
* <code>||[+-/](\d+)?[yMwdhHms]</code>.
|
||||
|
@ -43,19 +43,19 @@ public class DateMathParser {
|
|||
this.dateTimeFormatter = dateTimeFormatter;
|
||||
}
|
||||
|
||||
public long parse(String text, Callable<Long> now) {
|
||||
public long parse(String text, LongSupplier now) {
|
||||
return parse(text, now, false, null);
|
||||
}
|
||||
|
||||
// Note: we take a callable here for the timestamp in order to be able to figure out
|
||||
// if it has been used. For instance, the request cache does not cache requests that make
|
||||
// use of `now`.
|
||||
public long parse(String text, Callable<Long> now, boolean roundUp, DateTimeZone timeZone) {
|
||||
public long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone timeZone) {
|
||||
long time;
|
||||
String mathString;
|
||||
if (text.startsWith("now")) {
|
||||
try {
|
||||
time = now.call();
|
||||
time = now.getAsLong();
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchParseException("could not read the current timestamp", e);
|
||||
}
|
||||
|
@ -97,7 +97,7 @@ public class DateMathParser {
|
|||
throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (i >= mathString.length()) {
|
||||
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ public class DateMathParser {
|
|||
try {
|
||||
return parser.parseMillis(value);
|
||||
} catch (IllegalArgumentException e) {
|
||||
|
||||
|
||||
throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]", e, value, dateTimeFormatter.format());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -54,8 +54,6 @@ import java.util.List;
|
|||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
|
||||
|
||||
/** A {@link FieldMapper} for ip addresses. */
|
||||
|
@ -361,15 +359,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
} else {
|
||||
strValue = value.toString();
|
||||
}
|
||||
return dateParser.parse(strValue, now(context), roundUp, zone);
|
||||
}
|
||||
|
||||
private static Callable<Long> now(QueryRewriteContext context) {
|
||||
return () -> {
|
||||
return context != null
|
||||
? context.nowInMillis()
|
||||
: System.currentTimeMillis();
|
||||
};
|
||||
return dateParser.parse(strValue, context::nowInMillis, roundUp, zone);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -54,7 +54,6 @@ import java.util.List;
|
|||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter;
|
||||
|
@ -453,7 +452,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
} else {
|
||||
strValue = value.toString();
|
||||
}
|
||||
return dateParser.parse(strValue, now(context), inclusive, zone);
|
||||
return dateParser.parse(strValue, context::nowInMillis, inclusive, zone);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -485,17 +484,6 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
return (DateFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
private static Callable<Long> now(QueryRewriteContext context) {
|
||||
return new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() {
|
||||
return context != null
|
||||
? context.nowInMillis()
|
||||
: System.currentTimeMillis();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean customBoost() {
|
||||
return true;
|
||||
|
|
|
@ -41,7 +41,7 @@ import java.text.ParseException;
|
|||
import java.util.Arrays;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
/** A formatter for values as returned by the fielddata/doc-values APIs. */
|
||||
public interface DocValueFormat extends NamedWriteable {
|
||||
|
@ -63,11 +63,11 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
|
||||
/** Parse a value that was formatted with {@link #format(long)} back to the
|
||||
* original long value. */
|
||||
long parseLong(String value, boolean roundUp, Callable<Long> now);
|
||||
long parseLong(String value, boolean roundUp, LongSupplier now);
|
||||
|
||||
/** Parse a value that was formatted with {@link #format(double)} back to
|
||||
* the original double value. */
|
||||
double parseDouble(String value, boolean roundUp, Callable<Long> now);
|
||||
double parseDouble(String value, boolean roundUp, LongSupplier now);
|
||||
|
||||
/** Parse a value that was formatted with {@link #format(BytesRef)} back
|
||||
* to the original BytesRef. */
|
||||
|
@ -100,7 +100,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public long parseLong(String value, boolean roundUp, Callable<Long> now) {
|
||||
public long parseLong(String value, boolean roundUp, LongSupplier now) {
|
||||
double d = Double.parseDouble(value);
|
||||
if (roundUp) {
|
||||
d = Math.ceil(d);
|
||||
|
@ -111,7 +111,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
|
||||
public double parseDouble(String value, boolean roundUp, LongSupplier now) {
|
||||
return Double.parseDouble(value);
|
||||
}
|
||||
|
||||
|
@ -166,12 +166,12 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public long parseLong(String value, boolean roundUp, Callable<Long> now) {
|
||||
public long parseLong(String value, boolean roundUp, LongSupplier now) {
|
||||
return parser.parse(value, now, roundUp, timeZone);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
|
||||
public double parseDouble(String value, boolean roundUp, LongSupplier now) {
|
||||
return parseLong(value, roundUp, now);
|
||||
}
|
||||
|
||||
|
@ -208,12 +208,12 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public long parseLong(String value, boolean roundUp, Callable<Long> now) {
|
||||
public long parseLong(String value, boolean roundUp, LongSupplier now) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
|
||||
public double parseDouble(String value, boolean roundUp, LongSupplier now) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
|
@ -250,7 +250,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public long parseLong(String value, boolean roundUp, Callable<Long> now) {
|
||||
public long parseLong(String value, boolean roundUp, LongSupplier now) {
|
||||
switch (value) {
|
||||
case "false":
|
||||
return 0;
|
||||
|
@ -261,7 +261,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
|
||||
public double parseDouble(String value, boolean roundUp, LongSupplier now) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
|
@ -300,12 +300,12 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public long parseLong(String value, boolean roundUp, Callable<Long> now) {
|
||||
public long parseLong(String value, boolean roundUp, LongSupplier now) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
|
||||
public double parseDouble(String value, boolean roundUp, LongSupplier now) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
|
@ -358,7 +358,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public long parseLong(String value, boolean roundUp, Callable<Long> now) {
|
||||
public long parseLong(String value, boolean roundUp, LongSupplier now) {
|
||||
Number n;
|
||||
try {
|
||||
n = format.parse(value);
|
||||
|
@ -379,7 +379,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
|
||||
public double parseDouble(String value, boolean roundUp, LongSupplier now) {
|
||||
Number n;
|
||||
try {
|
||||
n = format.parse(value);
|
||||
|
|
|
@ -25,8 +25,8 @@ import org.elasticsearch.test.ESTestCase;
|
|||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.util.TimeZone;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
@ -36,26 +36,17 @@ public class DateMathParserTests extends ESTestCase {
|
|||
FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis");
|
||||
DateMathParser parser = new DateMathParser(formatter);
|
||||
|
||||
private static Callable<Long> callable(final long value) {
|
||||
return new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return value;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
void assertDateMathEquals(String toTest, String expected) {
|
||||
assertDateMathEquals(toTest, expected, 0, false, null);
|
||||
}
|
||||
|
||||
void assertDateMathEquals(String toTest, String expected, final long now, boolean roundUp, DateTimeZone timeZone) {
|
||||
long gotMillis = parser.parse(toTest, callable(now), roundUp, timeZone);
|
||||
long gotMillis = parser.parse(toTest, () -> now, roundUp, timeZone);
|
||||
assertDateEquals(gotMillis, toTest, expected);
|
||||
}
|
||||
|
||||
void assertDateEquals(long gotMillis, String original, String expected) {
|
||||
long expectedMillis = parser.parse(expected, callable(0));
|
||||
long expectedMillis = parser.parse(expected, () -> 0);
|
||||
if (gotMillis != expectedMillis) {
|
||||
fail("Date math not equal\n" +
|
||||
"Original : " + original + "\n" +
|
||||
|
@ -132,7 +123,7 @@ public class DateMathParserTests extends ESTestCase {
|
|||
|
||||
|
||||
public void testNow() {
|
||||
final long now = parser.parse("2014-11-18T14:27:32", callable(0), false, null);
|
||||
final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, null);
|
||||
|
||||
assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null);
|
||||
assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null);
|
||||
|
@ -204,7 +195,7 @@ public class DateMathParserTests extends ESTestCase {
|
|||
|
||||
// also check other time units
|
||||
DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_second||dateOptionalTime"));
|
||||
long datetime = parser.parse("1418248078", callable(0));
|
||||
long datetime = parser.parse("1418248078", () -> 0);
|
||||
assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000");
|
||||
|
||||
// a timestamp before 10000 is a year
|
||||
|
@ -217,7 +208,7 @@ public class DateMathParserTests extends ESTestCase {
|
|||
|
||||
void assertParseException(String msg, String date, String exc) {
|
||||
try {
|
||||
parser.parse(date, callable(0));
|
||||
parser.parse(date, () -> 0);
|
||||
fail("Date: " + date + "\n" + msg);
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(ExceptionsHelper.detailedMessage(e).contains(exc), equalTo(true));
|
||||
|
@ -239,12 +230,9 @@ public class DateMathParserTests extends ESTestCase {
|
|||
|
||||
public void testOnlyCallsNowIfNecessary() {
|
||||
final AtomicBoolean called = new AtomicBoolean();
|
||||
final Callable<Long> now = new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
called.set(true);
|
||||
return 42L;
|
||||
}
|
||||
final LongSupplier now = () -> {
|
||||
called.set(true);
|
||||
return 42L;
|
||||
};
|
||||
parser.parse("2014-11-18T14:27:32", now, false, null);
|
||||
assertFalse(called.get());
|
||||
|
@ -255,7 +243,7 @@ public class DateMathParserTests extends ESTestCase {
|
|||
public void testThatUnixTimestampMayNotHaveTimeZone() {
|
||||
DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_millis"));
|
||||
try {
|
||||
parser.parse("1234567890123", callable(42), false, DateTimeZone.forTimeZone(TimeZone.getTimeZone("CET")));
|
||||
parser.parse("1234567890123", () -> 42, false, DateTimeZone.forTimeZone(TimeZone.getTimeZone("CET")));
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch(ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse date field"));
|
||||
|
|
|
@ -30,11 +30,17 @@ import org.apache.lucene.index.IndexWriterConfig;
|
|||
import org.apache.lucene.index.MultiReader;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -44,6 +50,8 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
return new DateFieldMapper.DateFieldType();
|
||||
}
|
||||
|
||||
private static long nowInMillis;
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
setDummyNullValue(10);
|
||||
|
@ -59,6 +67,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
((DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA));
|
||||
}
|
||||
});
|
||||
nowInMillis = randomPositiveLong();
|
||||
}
|
||||
|
||||
public void testIsFieldWithinQueryEmptyReader() throws IOException {
|
||||
|
@ -71,26 +80,27 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
|
||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, null, () -> nowInMillis);
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, true, null, null, null));
|
||||
true, true, null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, false, null, null, null));
|
||||
false, false, null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, true, null, null, null));
|
||||
false, true, null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, false, null, null, null));
|
||||
true, false, null, null, context));
|
||||
}
|
||||
|
||||
public void testIsFieldWithinQuery() throws IOException {
|
||||
|
@ -145,20 +155,31 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build();
|
||||
QueryShardContext context = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(),
|
||||
indexSettings),
|
||||
null, null, null, null, null, null, null, null, null, () -> nowInMillis);
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("field");
|
||||
String date = "2015-10-12T14:10:55";
|
||||
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||
ft.setIndexOptions(IndexOptions.DOCS);
|
||||
assertEquals(LongPoint.newExactQuery("field", instant), ft.termQuery(date, null));
|
||||
assertEquals(LongPoint.newExactQuery("field", instant), ft.termQuery(date, context));
|
||||
|
||||
ft.setIndexOptions(IndexOptions.NONE);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.termQuery(date, null));
|
||||
() -> ft.termQuery(date, context));
|
||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||
}
|
||||
|
||||
public void testRangeQuery() throws IOException {
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build();
|
||||
QueryShardContext context = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings),
|
||||
null, null, null, null, null, null, null, null, null, () -> nowInMillis);
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("field");
|
||||
String date1 = "2015-10-12T14:10:55";
|
||||
|
@ -167,11 +188,11 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date2).getMillis();
|
||||
ft.setIndexOptions(IndexOptions.DOCS);
|
||||
assertEquals(LongPoint.newRangeQuery("field", instant1, instant2),
|
||||
ft.rangeQuery(date1, date2, true, true, null).rewrite(new MultiReader()));
|
||||
ft.rangeQuery(date1, date2, true, true, context).rewrite(new MultiReader()));
|
||||
|
||||
ft.setIndexOptions(IndexOptions.NONE);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery(date1, date2, true, true, null));
|
||||
() -> ft.rangeQuery(date1, date2, true, true, context));
|
||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
@ -47,6 +48,7 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
|
|||
IndexService index = createIndex("test");
|
||||
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
|
||||
DocumentMapper mapper = index.mapperService().documentMapper("type");
|
||||
QueryShardContext context = index.newQueryShardContext();
|
||||
|
||||
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
|
@ -67,25 +69,25 @@ public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
|
|||
IndexReader reader = DirectoryReader.open(writer);
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
|
||||
TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", null), 10);
|
||||
TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").fieldType().termQuery("1", null), 10);
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").fieldType().termQuery("1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").fieldType().termQuery("1.1", null), 10);
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").fieldType().termQuery("1.1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").fieldType().termQuery("2010-01-01", null), 10);
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").fieldType().termQuery("2010-01-01", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("1", null), 10);
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("1", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("2", null), 10);
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("2", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("3", null), 10);
|
||||
topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("3", context), 10);
|
||||
assertThat(topDocs.totalHits, equalTo(2));
|
||||
writer.close();
|
||||
reader.close();
|
||||
|
|
|
@ -40,12 +40,12 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.elasticsearch.test.TestSearchContext;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.Before;
|
||||
|
@ -235,6 +235,12 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||
}
|
||||
|
||||
public void testHourFormat() throws Exception {
|
||||
long nowInMillis = randomPositiveLong();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build();
|
||||
QueryShardContext context = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null,
|
||||
null, null, null, null, null, () -> nowInMillis);
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.field("date_detection", false)
|
||||
.startObject("properties").startObject("date_field").field("type", "date").field("format", "HH:mm:ss").endObject().endObject()
|
||||
|
@ -250,12 +256,18 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())));
|
||||
|
||||
LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType()
|
||||
.rangeQuery("10:00:00", "11:00:00", true, true, null).rewrite(null);
|
||||
.rangeQuery("10:00:00", "11:00:00", true, true, context).rewrite(null);
|
||||
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(11).millis(), DateTimeZone.UTC).getMillis()));
|
||||
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()));
|
||||
}
|
||||
|
||||
public void testDayWithoutYearFormat() throws Exception {
|
||||
long nowInMillis = randomPositiveLong();
|
||||
Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build();
|
||||
QueryShardContext context = new QueryShardContext(0,
|
||||
new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null,
|
||||
null, null, null, null, null, () -> nowInMillis);
|
||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.field("date_detection", false)
|
||||
.startObject("properties").startObject("date_field").field("type", "date").field("format", "MMM dd HH:mm:ss").endObject().endObject()
|
||||
|
@ -271,7 +283,7 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())));
|
||||
|
||||
LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType()
|
||||
.rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null).rewrite(null);
|
||||
.rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, context).rewrite(null);
|
||||
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(35).millis(), DateTimeZone.UTC).getMillis()));
|
||||
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()));
|
||||
}
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.joda.Joda;
|
|||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -45,6 +46,8 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||
return new LegacyDateFieldMapper.DateFieldType();
|
||||
}
|
||||
|
||||
private static long nowInMillis;
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
setDummyNullValue(10);
|
||||
|
@ -66,6 +69,7 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||
((LegacyDateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS);
|
||||
}
|
||||
});
|
||||
nowInMillis = randomPositiveLong();
|
||||
}
|
||||
|
||||
public void testIsFieldWithinQueryEmptyReader() throws IOException {
|
||||
|
@ -78,26 +82,27 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||
|
||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||
QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, null, () -> nowInMillis);
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, context));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, true, null, null, null));
|
||||
true, true, null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, false, null, null, null));
|
||||
false, false, null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, true, null, null, null));
|
||||
false, true, null, null, context));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, false, null, null, null));
|
||||
true, false, null, null, context));
|
||||
}
|
||||
|
||||
public void testIsFieldWithinQuery() throws IOException {
|
||||
|
|
|
@ -50,7 +50,6 @@ import java.util.HashMap;
|
|||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@ -1009,20 +1008,13 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
|
||||
DateMathParser parser = new DateMathParser(Joda.getStrictStandardDateFormatter());
|
||||
|
||||
final Callable<Long> callable = new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() throws Exception {
|
||||
return System.currentTimeMillis();
|
||||
}
|
||||
};
|
||||
|
||||
// we pick a random timezone offset of +12/-12 hours and insert two documents
|
||||
// one at 00:00 in that time zone and one at 12:00
|
||||
List<IndexRequestBuilder> builders = new ArrayList<>();
|
||||
int timeZoneHourOffset = randomIntBetween(-12, 12);
|
||||
DateTimeZone timezone = DateTimeZone.forOffsetHours(timeZoneHourOffset);
|
||||
DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", callable, false, timezone), DateTimeZone.UTC);
|
||||
DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", callable, false, timezone), DateTimeZone.UTC);
|
||||
DateTime timeZoneStartToday = new DateTime(parser.parse("now/d", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
|
||||
DateTime timeZoneNoonToday = new DateTime(parser.parse("now/d+12h", System::currentTimeMillis, false, timezone), DateTimeZone.UTC);
|
||||
builders.add(indexDoc(index, timeZoneStartToday, 1));
|
||||
builders.add(indexDoc(index, timeZoneNoonToday, 2));
|
||||
indexRandom(true, builders);
|
||||
|
|
Loading…
Reference in New Issue