Remove SearchContext#current and all its threadlocals (#20778)
Today SearchContext exposes the current context as a thread local, which makes any kind of sane interface design very hard. This PR removes the thread local entirely and instead passes the relevant context wherever it is needed. This simplifies state management dramatically and will allow for a much leaner SearchContext interface down the road.
This commit is contained in:
parent
d7d5df8863
commit
9c9afe3f01
|
@ -180,17 +180,17 @@ public class MapperQueryParser extends QueryParser {
|
|||
if (queryText.charAt(0) == '>') {
|
||||
if (queryText.length() > 2) {
|
||||
if (queryText.charAt(1) == '=') {
|
||||
return getRangeQuerySingle(field, queryText.substring(2), null, true, true);
|
||||
return getRangeQuerySingle(field, queryText.substring(2), null, true, true, context);
|
||||
}
|
||||
}
|
||||
return getRangeQuerySingle(field, queryText.substring(1), null, false, true);
|
||||
return getRangeQuerySingle(field, queryText.substring(1), null, false, true, context);
|
||||
} else if (queryText.charAt(0) == '<') {
|
||||
if (queryText.length() > 2) {
|
||||
if (queryText.charAt(1) == '=') {
|
||||
return getRangeQuerySingle(field, null, queryText.substring(2), true, true);
|
||||
return getRangeQuerySingle(field, null, queryText.substring(2), true, true, context);
|
||||
}
|
||||
}
|
||||
return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
|
||||
return getRangeQuerySingle(field, null, queryText.substring(1), true, false, context);
|
||||
}
|
||||
}
|
||||
currentFieldType = null;
|
||||
|
@ -290,19 +290,19 @@ public class MapperQueryParser extends QueryParser {
|
|||
Collection<String> fields = extractMultiFields(field);
|
||||
|
||||
if (fields == null) {
|
||||
return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive);
|
||||
return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive, context);
|
||||
}
|
||||
|
||||
|
||||
if (fields.size() == 1) {
|
||||
return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive);
|
||||
return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive, context);
|
||||
}
|
||||
|
||||
if (settings.useDisMax()) {
|
||||
List<Query> queries = new ArrayList<>();
|
||||
boolean added = false;
|
||||
for (String mField : fields) {
|
||||
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
|
||||
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context);
|
||||
if (q != null) {
|
||||
added = true;
|
||||
queries.add(applyBoost(mField, q));
|
||||
|
@ -315,7 +315,7 @@ public class MapperQueryParser extends QueryParser {
|
|||
} else {
|
||||
List<BooleanClause> clauses = new ArrayList<>();
|
||||
for (String mField : fields) {
|
||||
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
|
||||
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context);
|
||||
if (q != null) {
|
||||
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
|
||||
}
|
||||
|
@ -326,7 +326,7 @@ public class MapperQueryParser extends QueryParser {
|
|||
}
|
||||
|
||||
private Query getRangeQuerySingle(String field, String part1, String part2,
|
||||
boolean startInclusive, boolean endInclusive) {
|
||||
boolean startInclusive, boolean endInclusive, QueryShardContext context) {
|
||||
currentFieldType = context.fieldMapper(field);
|
||||
if (currentFieldType != null) {
|
||||
if (lowercaseExpandedTerms && currentFieldType.tokenized()) {
|
||||
|
@ -338,12 +338,12 @@ public class MapperQueryParser extends QueryParser {
|
|||
Query rangeQuery;
|
||||
if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) {
|
||||
LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType;
|
||||
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
|
||||
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, context);
|
||||
} else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
|
||||
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
|
||||
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
|
||||
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, context);
|
||||
} else {
|
||||
rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive);
|
||||
rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, context);
|
||||
}
|
||||
return rangeQuery;
|
||||
} catch (RuntimeException e) {
|
||||
|
|
|
@ -152,7 +152,6 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
|
|||
ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(request.shardId(), request.types(),
|
||||
request.nowInMillis(), request.filteringAliases());
|
||||
SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
|
||||
SearchContext.setCurrent(searchContext);
|
||||
try {
|
||||
ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query());
|
||||
searchContext.parsedQuery(parsedQuery);
|
||||
|
@ -166,7 +165,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
|
|||
valid = false;
|
||||
error = e.getMessage();
|
||||
} finally {
|
||||
Releasables.close(searchContext, () -> SearchContext.removeCurrent());
|
||||
Releasables.close(searchContext);
|
||||
}
|
||||
|
||||
return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
|
||||
|
|
|
@ -91,7 +91,6 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
|
|||
new String[]{request.type()}, request.nowInMillis, request.filteringAlias());
|
||||
SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
|
||||
Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
|
||||
SearchContext.setCurrent(context);
|
||||
Engine.GetResult result = null;
|
||||
try {
|
||||
result = context.indexShard().get(new Engine.Get(false, uidTerm));
|
||||
|
@ -118,7 +117,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
|
|||
} catch (IOException e) {
|
||||
throw new ElasticsearchException("Could not explain", e);
|
||||
} finally {
|
||||
Releasables.close(result, context, () -> SearchContext.removeCurrent());
|
||||
Releasables.close(result, context);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -449,9 +449,9 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||
* Creates a new QueryShardContext. The context has no types set yet; if types are required, set them via
|
||||
* {@link QueryShardContext#setTypes(String...)}
|
||||
*/
|
||||
public QueryShardContext newQueryShardContext(IndexReader indexReader, LongSupplier nowInMillis) {
|
||||
public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) {
|
||||
return new QueryShardContext(
|
||||
indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
|
||||
shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
|
||||
similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(),
|
||||
nodeServicesProvider.getClient(), indexReader,
|
||||
nodeServicesProvider.getClusterService().state(),
|
||||
|
@ -464,7 +464,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||
* used for rewriting since it does not know about the current {@link IndexReader}.
|
||||
*/
|
||||
public QueryShardContext newQueryShardContext() {
|
||||
return newQueryShardContext(null, threadPool::estimatedTimeInMillis);
|
||||
return newQueryShardContext(0, null, threadPool::estimatedTimeInMillis);
|
||||
}
|
||||
|
||||
public ThreadPool getThreadPool() {
|
||||
|
|
|
@ -90,7 +90,7 @@ public class FieldsVisitor extends StoredFieldVisitor {
|
|||
}
|
||||
List<Object> fieldValues = entry.getValue();
|
||||
for (int i = 0; i < fieldValues.size(); i++) {
|
||||
fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
|
||||
fieldValues.set(i, fieldType.valueForDisplay(fieldValues.get(i)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -79,7 +79,7 @@ public class SingleFieldsVisitor extends FieldsVisitor {
|
|||
return;
|
||||
}
|
||||
for (int i = 0; i < fieldValues.size(); i++) {
|
||||
fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
|
||||
fieldValues.set(i, fieldType.valueForDisplay(fieldValues.get(i)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -104,7 +104,7 @@ public class BinaryFieldMapper extends FieldMapper {
|
|||
|
||||
|
||||
@Override
|
||||
public BytesReference valueForSearch(Object value) {
|
||||
public BytesReference valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -164,7 +165,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Boolean valueForSearch(Object value) {
|
||||
public Boolean valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -197,7 +198,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
return new TermRangeQuery(name(),
|
||||
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
||||
|
|
|
@ -43,9 +43,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
|||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults;
|
||||
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -172,15 +172,17 @@ public class DateFieldMapper extends FieldMapper {
|
|||
final boolean includeUpper;
|
||||
final DateTimeZone timeZone;
|
||||
final DateMathParser forcedDateParser;
|
||||
private QueryShardContext queryShardContext;
|
||||
|
||||
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||
DateTimeZone timeZone, DateMathParser forcedDateParser) {
|
||||
DateTimeZone timeZone, DateMathParser forcedDateParser, QueryShardContext queryShardContext) {
|
||||
this.lowerTerm = lowerTerm;
|
||||
this.upperTerm = upperTerm;
|
||||
this.includeLower = includeLower;
|
||||
this.includeUpper = includeUpper;
|
||||
this.timeZone = timeZone;
|
||||
this.forcedDateParser = forcedDateParser;
|
||||
this.queryShardContext = queryShardContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -189,7 +191,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
if (rewritten != this) {
|
||||
return rewritten;
|
||||
}
|
||||
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
||||
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, queryShardContext);
|
||||
}
|
||||
|
||||
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
|
||||
|
@ -301,7 +303,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
|
||||
@Override
|
||||
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
||||
Query query = innerRangeQuery(value, value, true, true, null, null);
|
||||
Query query = innerRangeQuery(value, value, true, true, null, null, context);
|
||||
if (boost() != 1f) {
|
||||
query = new BoostQuery(query, boost());
|
||||
}
|
||||
|
@ -309,19 +311,19 @@ public class DateFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null);
|
||||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context);
|
||||
}
|
||||
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
||||
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context);
|
||||
}
|
||||
|
||||
Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
DateMathParser parser = forcedDateParser == null
|
||||
? dateMathParser
|
||||
|
@ -330,7 +332,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
if (lowerTerm == null) {
|
||||
l = Long.MIN_VALUE;
|
||||
} else {
|
||||
l = parseToMilliseconds(lowerTerm, !includeLower, timeZone, parser);
|
||||
l = parseToMilliseconds(lowerTerm, !includeLower, timeZone, parser, context);
|
||||
if (includeLower == false) {
|
||||
++l;
|
||||
}
|
||||
|
@ -338,7 +340,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
if (upperTerm == null) {
|
||||
u = Long.MAX_VALUE;
|
||||
} else {
|
||||
u = parseToMilliseconds(upperTerm, includeUpper, timeZone, parser);
|
||||
u = parseToMilliseconds(upperTerm, includeUpper, timeZone, parser, context);
|
||||
if (includeUpper == false) {
|
||||
--u;
|
||||
}
|
||||
|
@ -347,7 +349,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
public long parseToMilliseconds(Object value, boolean roundUp,
|
||||
@Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
|
||||
@Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
|
||||
DateMathParser dateParser = dateMathParser();
|
||||
if (forcedDateParser != null) {
|
||||
dateParser = forcedDateParser;
|
||||
|
@ -359,14 +361,13 @@ public class DateFieldMapper extends FieldMapper {
|
|||
} else {
|
||||
strValue = value.toString();
|
||||
}
|
||||
return dateParser.parse(strValue, now(), roundUp, zone);
|
||||
return dateParser.parse(strValue, now(context), roundUp, zone);
|
||||
}
|
||||
|
||||
private static Callable<Long> now() {
|
||||
private static Callable<Long> now(QueryRewriteContext context) {
|
||||
return () -> {
|
||||
final SearchContext context = SearchContext.current();
|
||||
return context != null
|
||||
? context.getQueryShardContext().nowInMillis()
|
||||
? context.nowInMillis()
|
||||
: System.currentTimeMillis();
|
||||
};
|
||||
}
|
||||
|
@ -390,7 +391,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
public Relation isFieldWithinQuery(IndexReader reader,
|
||||
Object from, Object to,
|
||||
boolean includeLower, boolean includeUpper,
|
||||
DateTimeZone timeZone, DateMathParser dateParser) throws IOException {
|
||||
DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
|
||||
if (dateParser == null) {
|
||||
dateParser = this.dateMathParser;
|
||||
}
|
||||
|
@ -405,7 +406,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
|
||||
long fromInclusive = Long.MIN_VALUE;
|
||||
if (from != null) {
|
||||
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser);
|
||||
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
|
||||
if (includeLower == false) {
|
||||
if (fromInclusive == Long.MAX_VALUE) {
|
||||
return Relation.DISJOINT;
|
||||
|
@ -416,7 +417,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
|
||||
long toInclusive = Long.MAX_VALUE;
|
||||
if (to != null) {
|
||||
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser);
|
||||
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
|
||||
if (includeUpper == false) {
|
||||
if (toInclusive == Long.MIN_VALUE) {
|
||||
return Relation.DISJOINT;
|
||||
|
@ -441,7 +442,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
Long val = (Long) value;
|
||||
if (val == null) {
|
||||
return null;
|
||||
|
|
|
@ -178,7 +178,7 @@ public class IpFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
InetAddress lower;
|
||||
if (lowerTerm == null) {
|
||||
|
@ -231,7 +231,7 @@ public class IpFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -134,7 +134,7 @@ public final class KeywordFieldMapper extends FieldMapper {
|
|||
}
|
||||
node.put("index", index);
|
||||
}
|
||||
|
||||
|
||||
return new StringFieldMapper.TypeParser().parse(name, node, parserContext);
|
||||
}
|
||||
KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name);
|
||||
|
@ -196,7 +196,7 @@ public final class KeywordFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -131,7 +132,7 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Byte valueForSearch(Object value) {
|
||||
public Byte valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -146,7 +147,7 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : (int)parseValue(lowerTerm),
|
||||
upperTerm == null ? null : (int)parseValue(upperTerm),
|
||||
|
|
|
@ -43,8 +43,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
|||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField;
|
||||
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -184,14 +185,17 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
final boolean includeUpper;
|
||||
final DateTimeZone timeZone;
|
||||
final DateMathParser forcedDateParser;
|
||||
private QueryShardContext context;
|
||||
|
||||
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) {
|
||||
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone,
|
||||
DateMathParser forcedDateParser, QueryShardContext context) {
|
||||
this.lowerTerm = lowerTerm;
|
||||
this.upperTerm = upperTerm;
|
||||
this.includeLower = includeLower;
|
||||
this.includeUpper = includeUpper;
|
||||
this.timeZone = timeZone;
|
||||
this.forcedDateParser = forcedDateParser;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -200,7 +204,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
if (rewritten != this) {
|
||||
return rewritten;
|
||||
}
|
||||
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
||||
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context);
|
||||
}
|
||||
|
||||
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
|
||||
|
@ -339,7 +343,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
Long val = (Long) value;
|
||||
if (val == null) {
|
||||
return null;
|
||||
|
@ -348,8 +352,8 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null);
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -366,14 +370,20 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
dateTimeFormatter(), minValue, maxValue);
|
||||
}
|
||||
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
||||
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
|
||||
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context);
|
||||
}
|
||||
|
||||
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
||||
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
|
||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
|
||||
upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
|
||||
lowerTerm == null ? null
|
||||
: parseToMilliseconds(lowerTerm, !includeLower, timeZone,
|
||||
forcedDateParser == null ? dateMathParser : forcedDateParser, context),
|
||||
upperTerm == null ? null
|
||||
: parseToMilliseconds(upperTerm, includeUpper, timeZone,
|
||||
forcedDateParser == null ? dateMathParser : forcedDateParser, context),
|
||||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
|
@ -381,7 +391,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
public Relation isFieldWithinQuery(IndexReader reader,
|
||||
Object from, Object to,
|
||||
boolean includeLower, boolean includeUpper,
|
||||
DateTimeZone timeZone, DateMathParser dateParser) throws IOException {
|
||||
DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
|
||||
if (dateParser == null) {
|
||||
dateParser = this.dateMathParser;
|
||||
}
|
||||
|
@ -397,7 +407,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
|
||||
long fromInclusive = Long.MIN_VALUE;
|
||||
if (from != null) {
|
||||
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser);
|
||||
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
|
||||
if (includeLower == false) {
|
||||
if (fromInclusive == Long.MAX_VALUE) {
|
||||
return Relation.DISJOINT;
|
||||
|
@ -408,7 +418,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
|
||||
long toInclusive = Long.MAX_VALUE;
|
||||
if (to != null) {
|
||||
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser);
|
||||
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
|
||||
if (includeUpper == false) {
|
||||
if (toInclusive == Long.MIN_VALUE) {
|
||||
return Relation.DISJOINT;
|
||||
|
@ -426,7 +436,8 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
|
||||
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone,
|
||||
@Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
|
||||
if (value instanceof Long) {
|
||||
return ((Long) value).longValue();
|
||||
}
|
||||
|
@ -442,7 +453,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
} else {
|
||||
strValue = value.toString();
|
||||
}
|
||||
return dateParser.parse(strValue, now(), inclusive, zone);
|
||||
return dateParser.parse(strValue, now(context), inclusive, zone);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -474,13 +485,12 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
return (DateFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
private static Callable<Long> now() {
|
||||
private static Callable<Long> now(QueryRewriteContext context) {
|
||||
return new Callable<Long>() {
|
||||
@Override
|
||||
public Long call() {
|
||||
final SearchContext context = SearchContext.current();
|
||||
return context != null
|
||||
? context.getQueryShardContext().nowInMillis()
|
||||
? context.nowInMillis()
|
||||
: System.currentTimeMillis();
|
||||
}
|
||||
};
|
||||
|
|
|
@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -135,7 +136,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public java.lang.Double valueForSearch(Object value) {
|
||||
public java.lang.Double valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -157,7 +158,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : parseDoubleValue(lowerTerm),
|
||||
upperTerm == null ? null : parseDoubleValue(upperTerm),
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -142,7 +143,7 @@ public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||
upperTerm == null ? null : parseValue(upperTerm),
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -145,7 +146,7 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||
upperTerm == null ? null : parseValue(upperTerm),
|
||||
|
|
|
@ -171,7 +171,7 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
|
|||
* IPs should return as a string.
|
||||
*/
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
Long val = (Long) value;
|
||||
if (val == null) {
|
||||
return null;
|
||||
|
@ -210,14 +210,14 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
if (fromTo != null) {
|
||||
return rangeQuery(fromTo[0] == 0 ? null : fromTo[0],
|
||||
fromTo[1] == MAX_IP ? null : fromTo[1], true, false);
|
||||
fromTo[1] == MAX_IP ? null : fromTo[1], true, false, context);
|
||||
}
|
||||
}
|
||||
return super.termQuery(value, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||
upperTerm == null ? null : parseValue(upperTerm),
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -146,7 +147,7 @@ public class LegacyLongFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : parseLongValue(lowerTerm),
|
||||
upperTerm == null ? null : parseLongValue(upperTerm),
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -135,7 +136,7 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Short valueForSearch(Object value) {
|
||||
public Short valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
@ -150,7 +151,7 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||
lowerTerm == null ? null : (int)parseValue(lowerTerm),
|
||||
upperTerm == null ? null : (int)parseValue(upperTerm),
|
||||
|
|
|
@ -38,6 +38,7 @@ import org.elasticsearch.common.joda.DateMathParser;
|
|||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
|
@ -303,7 +304,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||
/** Given a value that comes from the stored fields API, convert it to the
|
||||
* expected type. For instance a date field would store dates as longs and
|
||||
* format it back to a string in this method. */
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
return value;
|
||||
}
|
||||
|
||||
|
@ -343,7 +344,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||
return new ConstantScoreQuery(builder.build());
|
||||
}
|
||||
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
|
||||
}
|
||||
|
||||
|
@ -399,10 +400,10 @@ public abstract class MappedFieldType extends FieldType {
|
|||
* {@link Relation#INTERSECTS}, which is always fine to return when there is
|
||||
* no way to check whether values are actually within bounds. */
|
||||
public Relation isFieldWithinQuery(
|
||||
IndexReader reader,
|
||||
Object from, Object to,
|
||||
boolean includeLower, boolean includeUpper,
|
||||
DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException {
|
||||
IndexReader reader,
|
||||
Object from, Object to,
|
||||
boolean includeLower, boolean includeUpper,
|
||||
DateTimeZone timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
|
||||
return Relation.INTERSECTS;
|
||||
}
|
||||
|
||||
|
|
|
@ -54,6 +54,7 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.LongSupplier;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
|
@ -653,4 +654,5 @@ public class MapperService extends AbstractIndexComponent {
|
|||
return defaultAnalyzer;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -815,7 +815,7 @@ public class NumberFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
Query query = type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper);
|
||||
if (boost() != 1f) {
|
||||
|
@ -836,7 +836,7 @@ public class NumberFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -233,7 +233,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
Long lo = null;
|
||||
if (lowerTerm != null) {
|
||||
|
@ -288,7 +288,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
|
|
@ -46,6 +46,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
|
|||
super(ref);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termsQuery(List<?> values, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
BytesRef[] bytesRefs = new BytesRef[values.size()];
|
||||
|
@ -85,7 +86,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
return new TermRangeQuery(name(),
|
||||
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
||||
|
|
|
@ -28,7 +28,6 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.AlreadyExpiredException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Date;
|
||||
|
@ -139,15 +138,9 @@ public class TTLFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
// Overrides valueForSearch to display live value of remaining ttl
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
long now;
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
if (searchContext != null) {
|
||||
now = searchContext.getQueryShardContext().nowInMillis();
|
||||
} else {
|
||||
now = System.currentTimeMillis();
|
||||
}
|
||||
Long val = (Long) super.valueForSearch(value);
|
||||
public Object valueForDisplay(Object value) {
|
||||
final long now = System.currentTimeMillis();
|
||||
Long val = (Long) super.valueForDisplay(value);
|
||||
return val - now;
|
||||
}
|
||||
}
|
||||
|
@ -177,11 +170,6 @@ public class TTLFieldMapper extends MetadataFieldMapper {
|
|||
return this.defaultTTL;
|
||||
}
|
||||
|
||||
// Other implementation for realtime get display
|
||||
public Object valueForSearch(long expirationTime) {
|
||||
return expirationTime - System.currentTimeMillis();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void preParse(ParseContext context) throws IOException {
|
||||
}
|
||||
|
|
|
@ -179,7 +179,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
public Object valueForDisplay(Object value) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -377,7 +377,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
|
|||
switch(type) {
|
||||
case INDEXED:
|
||||
LegacyGeoPointFieldType geoFieldType = ((LegacyGeoPointFieldType) fieldType);
|
||||
query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
|
||||
query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType, context);
|
||||
break;
|
||||
case MEMORY:
|
||||
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
|
||||
|
|
|
@ -306,7 +306,7 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
|
|||
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
|
||||
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
|
||||
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance,
|
||||
geoFieldType, indexFieldData, bboxOptimization);
|
||||
geoFieldType, indexFieldData, bboxOptimization, shardContext);
|
||||
}
|
||||
|
||||
// if index created V_2_2 use (soon to be legacy) numeric encoding postings format
|
||||
|
|
|
@ -356,7 +356,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
|
|||
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
|
||||
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
|
||||
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
|
||||
indexFieldData, bboxOptimization);
|
||||
indexFieldData, bboxOptimization, context);
|
||||
}
|
||||
|
||||
// if index created V_2_2 use (soon to be legacy) numeric encoding postings format
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.elasticsearch.script.ScriptService;
|
|||
import org.elasticsearch.script.ScriptSettings;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.function.LongSupplier;
|
||||
|
||||
/**
|
||||
* Context object used to rewrite {@link QueryBuilder} instances into simplified version.
|
||||
|
@ -48,10 +49,11 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
|
|||
protected final Client client;
|
||||
protected final IndexReader reader;
|
||||
protected final ClusterState clusterState;
|
||||
protected final LongSupplier nowInMillis;
|
||||
|
||||
public QueryRewriteContext(IndexSettings indexSettings, MapperService mapperService, ScriptService scriptService,
|
||||
IndicesQueriesRegistry indicesQueriesRegistry, Client client, IndexReader reader,
|
||||
ClusterState clusterState) {
|
||||
ClusterState clusterState, LongSupplier nowInMillis) {
|
||||
this.mapperService = mapperService;
|
||||
this.scriptService = scriptService;
|
||||
this.indexSettings = indexSettings;
|
||||
|
@ -59,6 +61,7 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
|
|||
this.client = client;
|
||||
this.reader = reader;
|
||||
this.clusterState = clusterState;
|
||||
this.nowInMillis = nowInMillis;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -117,6 +120,10 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
|
|||
return new QueryParseContext(defaultScriptLanguage, indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher());
|
||||
}
|
||||
|
||||
public long nowInMillis() {
|
||||
return nowInMillis.getAsLong();
|
||||
}
|
||||
|
||||
public BytesReference getTemplateBytes(Script template) {
|
||||
ExecutableScript executable = scriptService.executable(template,
|
||||
ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
||||
|
|
|
@ -65,7 +65,6 @@ import org.elasticsearch.script.Script;
|
|||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
/**
|
||||
|
@ -78,6 +77,7 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
private final BitsetFilterCache bitsetFilterCache;
|
||||
private final IndexFieldDataService indexFieldDataService;
|
||||
private final IndexSettings indexSettings;
|
||||
private final int shardId;
|
||||
private String[] types = Strings.EMPTY_ARRAY;
|
||||
private boolean cachable = true;
|
||||
private final SetOnce<Boolean> frozen = new SetOnce<>();
|
||||
|
@ -97,13 +97,13 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
private boolean mapUnmappedFieldAsString;
|
||||
private NestedScope nestedScope;
|
||||
private boolean isFilter;
|
||||
private final LongSupplier nowInMillis;
|
||||
|
||||
public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService,
|
||||
MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
|
||||
final IndicesQueriesRegistry indicesQueriesRegistry, Client client,
|
||||
public QueryShardContext(int shardId, IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache,
|
||||
IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService,
|
||||
ScriptService scriptService, final IndicesQueriesRegistry indicesQueriesRegistry, Client client,
|
||||
IndexReader reader, ClusterState clusterState, LongSupplier nowInMillis) {
|
||||
super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState);
|
||||
super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState, nowInMillis);
|
||||
this.shardId = shardId;
|
||||
this.indexSettings = indexSettings;
|
||||
this.similarityService = similarityService;
|
||||
this.mapperService = mapperService;
|
||||
|
@ -112,11 +112,11 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
this.nestedScope = new NestedScope();
|
||||
this.nowInMillis = nowInMillis;
|
||||
|
||||
}
|
||||
|
||||
public QueryShardContext(QueryShardContext source) {
|
||||
this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
|
||||
this(source.shardId, source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
|
||||
source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client,
|
||||
source.reader, source.clusterState, source.nowInMillis);
|
||||
this.types = source.getTypes();
|
||||
|
@ -264,21 +264,12 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
private SearchLookup lookup = null;
|
||||
|
||||
public SearchLookup lookup() {
|
||||
SearchContext current = SearchContext.current();
|
||||
if (current != null) {
|
||||
return current.lookup();
|
||||
}
|
||||
if (lookup == null) {
|
||||
lookup = new SearchLookup(getMapperService(), indexFieldDataService, null);
|
||||
lookup = new SearchLookup(getMapperService(), indexFieldDataService, types);
|
||||
}
|
||||
return lookup;
|
||||
}
|
||||
|
||||
public long nowInMillis() {
|
||||
failIfFrozen();
|
||||
return nowInMillis.getAsLong();
|
||||
}
|
||||
|
||||
public NestedScope nestedScope() {
|
||||
return nestedScope;
|
||||
}
|
||||
|
@ -411,4 +402,17 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||
public boolean isCachable() {
|
||||
return cachable;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the shard ID this context was created for.
|
||||
*/
|
||||
public int getShardId() {
|
||||
return shardId;
|
||||
}
|
||||
|
||||
public long nowInMillis() {
|
||||
failIfFrozen();
|
||||
return super.nowInMillis();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -406,7 +406,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
|
|||
} else {
|
||||
DateMathParser dateMathParser = format == null ? null : new DateMathParser(format);
|
||||
return fieldType.isFieldWithinQuery(queryRewriteContext.getIndexReader(), from, to, includeLower,
|
||||
includeUpper, timeZone, dateMathParser);
|
||||
includeUpper, timeZone, dateMathParser, queryRewriteContext);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -445,21 +445,21 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
|
|||
forcedDateParser = new DateMathParser(this.format);
|
||||
}
|
||||
query = ((LegacyDateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
|
||||
timeZone, forcedDateParser);
|
||||
timeZone, forcedDateParser, context);
|
||||
} else if (mapper instanceof DateFieldMapper.DateFieldType) {
|
||||
DateMathParser forcedDateParser = null;
|
||||
if (this.format != null) {
|
||||
forcedDateParser = new DateMathParser(this.format);
|
||||
}
|
||||
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
|
||||
timeZone, forcedDateParser);
|
||||
timeZone, forcedDateParser, context);
|
||||
} else {
|
||||
if (timeZone != null) {
|
||||
throw new QueryShardException(context, "[range] time_zone can not be applied to non date field ["
|
||||
+ fieldName + "]");
|
||||
}
|
||||
//LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well?
|
||||
query = mapper.rangeQuery(from, to, includeLower, includeUpper);
|
||||
query = mapper.rangeQuery(from, to, includeLower, includeUpper, context);
|
||||
}
|
||||
} else {
|
||||
if (timeZone != null) {
|
||||
|
|
|
@ -48,6 +48,7 @@ import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
|
|||
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
|
||||
|
@ -315,9 +316,10 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
|
|||
origin = context.nowInMillis();
|
||||
} else {
|
||||
if (dateFieldType instanceof LegacyDateFieldMapper.DateFieldType) {
|
||||
origin = ((LegacyDateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null);
|
||||
origin = ((LegacyDateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null,
|
||||
context);
|
||||
} else {
|
||||
origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null);
|
||||
origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null, context);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -131,19 +131,11 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
|
|||
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
|
||||
return new RandomScoreFunction();
|
||||
}
|
||||
final int salt = (context.index().getName().hashCode() << 10) | getCurrentShardId();
|
||||
final int salt = (context.index().getName().hashCode() << 10) | context.getShardId();
|
||||
final IndexFieldData<?> uidFieldData = context.getForField(fieldType);
|
||||
return new RandomScoreFunction(this.seed == null ? hash(context.nowInMillis()) : seed, salt, uidFieldData);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current shard's id for the seed. Protected because this method doesn't work during certain unit tests and needs to be
|
||||
* replaced.
|
||||
*/
|
||||
int getCurrentShardId() {
|
||||
return SearchContext.current().indexShard().shardId().id();
|
||||
}
|
||||
|
||||
private static int hash(long value) {
|
||||
return Long.hashCode(value);
|
||||
}
|
||||
|
|
|
@ -35,8 +35,8 @@ import org.elasticsearch.common.geo.GeoPoint;
|
|||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||
import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -63,7 +63,7 @@ public class GeoDistanceRangeQuery extends Query {
|
|||
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower,
|
||||
boolean includeUpper, GeoDistance geoDistance,
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType,
|
||||
IndexGeoPointFieldData indexFieldData, String optimizeBbox) {
|
||||
IndexGeoPointFieldData indexFieldData, String optimizeBbox, QueryShardContext context) {
|
||||
this.lat = point.lat();
|
||||
this.lon = point.lon();
|
||||
this.geoDistance = geoDistance;
|
||||
|
@ -96,7 +96,7 @@ public class GeoDistanceRangeQuery extends Query {
|
|||
boundingBoxFilter = null;
|
||||
} else if ("indexed".equals(optimizeBbox)) {
|
||||
boundingBoxFilter = LegacyIndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(),
|
||||
distanceBoundingCheck.bottomRight(), fieldType);
|
||||
distanceBoundingCheck.bottomRight(), fieldType, context);
|
||||
distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
|
||||
} else {
|
||||
throw new IllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported");
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -34,34 +35,34 @@ import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
|||
public class LegacyIndexedGeoBoundingBoxQuery {
|
||||
|
||||
public static Query create(GeoPoint topLeft, GeoPoint bottomRight,
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) {
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) {
|
||||
if (!fieldType.isLatLonEnabled()) {
|
||||
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name()
|
||||
+ "], can't use indexed filter on it");
|
||||
}
|
||||
//checks to see if bounding box crosses 180 degrees
|
||||
if (topLeft.lon() > bottomRight.lon()) {
|
||||
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
|
||||
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context);
|
||||
} else {
|
||||
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
|
||||
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context);
|
||||
}
|
||||
}
|
||||
|
||||
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) {
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) {
|
||||
BooleanQuery.Builder filter = new BooleanQuery.Builder();
|
||||
filter.setMinimumNumberShouldMatch(1);
|
||||
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD);
|
||||
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true), Occur.SHOULD);
|
||||
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
|
||||
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, context), Occur.SHOULD);
|
||||
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true, context), Occur.SHOULD);
|
||||
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST);
|
||||
return new ConstantScoreQuery(filter.build());
|
||||
}
|
||||
|
||||
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) {
|
||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) {
|
||||
BooleanQuery.Builder filter = new BooleanQuery.Builder();
|
||||
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
|
||||
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
|
||||
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, context), Occur.MUST);
|
||||
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST);
|
||||
return new ConstantScoreQuery(filter.build());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -197,7 +197,8 @@ public class IndicesTTLService extends AbstractLifecycleComponent {
|
|||
|
||||
private void purgeShards(List<IndexShard> shardsToPurge) {
|
||||
for (IndexShard shardToPurge : shardsToPurge) {
|
||||
Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true);
|
||||
Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false,
|
||||
true, null);
|
||||
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
|
||||
try {
|
||||
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());
|
||||
|
|
|
@ -136,7 +136,6 @@ final class DefaultSearchContext extends SearchContext {
|
|||
private SearchContextHighlight highlight;
|
||||
private SuggestionSearchContext suggest;
|
||||
private List<RescoreSearchContext> rescore;
|
||||
private SearchLookup searchLookup;
|
||||
private volatile long keepAlive;
|
||||
private final long originNanoTime = System.nanoTime();
|
||||
private volatile long lastAccessTime = -1;
|
||||
|
@ -168,17 +167,10 @@ final class DefaultSearchContext extends SearchContext {
|
|||
this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
|
||||
this.timeEstimateCounter = timeEstimateCounter;
|
||||
this.timeout = timeout;
|
||||
queryShardContext = indexService.newQueryShardContext(searcher.getIndexReader(), request::nowInMillis);
|
||||
queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher.getIndexReader(), request::nowInMillis);
|
||||
queryShardContext.setTypes(request.types());
|
||||
}
|
||||
|
||||
DefaultSearchContext(DefaultSearchContext source) {
|
||||
this(source.id(), source.request(), source.shardTarget(), source.engineSearcher, source.indexService, source.indexShard(),
|
||||
source.bigArrays(), source.timeEstimateCounter(), source.parseFieldMatcher(), source.timeout(), source.fetchPhase());
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public void doClose() {
|
||||
// clear and scope phase we have
|
||||
|
@ -742,15 +734,6 @@ final class DefaultSearchContext extends SearchContext {
|
|||
this.keepAlive = keepAlive;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchLookup lookup() {
|
||||
// TODO: The types should take into account the parsing context in QueryParserContext...
|
||||
if (searchLookup == null) {
|
||||
searchLookup = new SearchLookup(mapperService(), fieldData(), request.types());
|
||||
}
|
||||
return searchLookup;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DfsSearchResult dfsResult() {
|
||||
return dfsResult;
|
||||
|
|
|
@ -494,7 +494,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||
if (context == null) {
|
||||
throw new SearchContextMissingException(id);
|
||||
}
|
||||
SearchContext.setCurrent(context);
|
||||
return context;
|
||||
}
|
||||
|
||||
|
@ -519,15 +518,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||
final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws IOException {
|
||||
final DefaultSearchContext context = createSearchContext(request, defaultSearchTimeout, searcher);
|
||||
try {
|
||||
// we clone the search context here just for rewriting otherwise we
|
||||
// we clone the query shard context here just for rewriting otherwise we
|
||||
// might end up with incorrect state since we are using now() or script services
|
||||
// during rewrite and normalized / evaluate templates etc.
|
||||
// NOTE this context doesn't need to be closed - the outer context will
|
||||
// take care of this.
|
||||
DefaultSearchContext rewriteContext = new DefaultSearchContext(context);
|
||||
SearchContext.setCurrent(rewriteContext);
|
||||
request.rewrite(rewriteContext.getQueryShardContext());
|
||||
SearchContext.setCurrent(context);
|
||||
request.rewrite(new QueryShardContext(context.getQueryShardContext()));
|
||||
assert context.getQueryShardContext().isCachable();
|
||||
if (request.scroll() != null) {
|
||||
context.scrollContext(new ScrollContext());
|
||||
|
@ -620,9 +614,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||
|
||||
private void cleanContext(SearchContext context) {
|
||||
try {
|
||||
assert context == SearchContext.current();
|
||||
context.clearReleasables(Lifetime.PHASE);
|
||||
SearchContext.removeCurrent();
|
||||
} finally {
|
||||
context.decRef();
|
||||
}
|
||||
|
|
|
@ -91,14 +91,14 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
|
|||
: searcher.count(filter);
|
||||
this.bucketCountThresholds = bucketCountThresholds;
|
||||
this.significanceHeuristic = significanceHeuristic;
|
||||
setFieldInfo();
|
||||
setFieldInfo(context.searchContext());
|
||||
|
||||
}
|
||||
|
||||
private void setFieldInfo() {
|
||||
private void setFieldInfo(SearchContext context) {
|
||||
if (!config.unmapped()) {
|
||||
this.indexedFieldName = config.fieldContext().field();
|
||||
fieldType = SearchContext.current().smartNameFieldType(indexedFieldName);
|
||||
fieldType = context.smartNameFieldType(indexedFieldName);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -92,7 +92,7 @@ public final class MatchedQueriesFetchSubPhase implements FetchSubPhase {
|
|||
} catch (IOException e) {
|
||||
throw ExceptionsHelper.convertToElastic(e);
|
||||
} finally {
|
||||
SearchContext.current().clearReleasables(Lifetime.COLLECTION);
|
||||
context.clearReleasables(Lifetime.COLLECTION);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -80,21 +80,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
// For reference why we use RefCounted here see #20095
|
||||
public abstract class SearchContext extends AbstractRefCounted implements Releasable {
|
||||
|
||||
private static ThreadLocal<SearchContext> current = new ThreadLocal<>();
|
||||
public static final int DEFAULT_TERMINATE_AFTER = 0;
|
||||
|
||||
public static void setCurrent(SearchContext value) {
|
||||
current.set(value);
|
||||
}
|
||||
|
||||
public static void removeCurrent() {
|
||||
current.remove();
|
||||
}
|
||||
|
||||
public static SearchContext current() {
|
||||
return current.get();
|
||||
}
|
||||
|
||||
private Map<Lifetime, List<Releasable>> clearables = null;
|
||||
private final AtomicBoolean closed = new AtomicBoolean(false);
|
||||
private InnerHitsContext innerHitsContext;
|
||||
|
@ -315,7 +301,9 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
|
|||
|
||||
public abstract void keepAlive(long keepAlive);
|
||||
|
||||
public abstract SearchLookup lookup();
|
||||
public SearchLookup lookup() {
|
||||
return getQueryShardContext().lookup();
|
||||
}
|
||||
|
||||
public abstract DfsSearchResult dfsResult();
|
||||
|
||||
|
|
|
@ -342,16 +342,6 @@ public class SubSearchContext extends FilteredSearchContext {
|
|||
return fetchSearchResult;
|
||||
}
|
||||
|
||||
private SearchLookup searchLookup;
|
||||
|
||||
@Override
|
||||
public SearchLookup lookup() {
|
||||
if (searchLookup == null) {
|
||||
searchLookup = new SearchLookup(mapperService(), fieldData(), request().types());
|
||||
}
|
||||
return searchLookup;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Counter timeEstimateCounter() {
|
||||
throw new UnsupportedOperationException("Not supported");
|
||||
|
|
|
@ -30,10 +30,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.BinaryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
|
@ -102,7 +98,7 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase {
|
|||
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
|
||||
assertEquals(new BytesRef(value), indexedValue);
|
||||
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
|
||||
Object originalValue = fieldMapper.fieldType().valueForSearch(indexedValue);
|
||||
Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue);
|
||||
assertEquals(new BytesArray(value), originalValue);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,8 +21,6 @@ package org.elasticsearch.index.mapper;
|
|||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.index.mapper.BooleanFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
public class BooleanFieldTypeTests extends FieldTypeTestCase {
|
||||
|
@ -44,11 +42,11 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase {
|
|||
|
||||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
assertEquals(true, ft.valueForSearch("T"));
|
||||
assertEquals(false, ft.valueForSearch("F"));
|
||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForSearch(0));
|
||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForSearch("true"));
|
||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForSearch("G"));
|
||||
assertEquals(true, ft.valueForDisplay("T"));
|
||||
assertEquals(false, ft.valueForDisplay("F"));
|
||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay(0));
|
||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay("true"));
|
||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay("G"));
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
|
|
|
@ -28,15 +28,10 @@ import org.apache.lucene.index.IndexReader;
|
|||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.MultiReader;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
|
@ -71,31 +66,31 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
DateFieldType ft = new DateFieldType();
|
||||
ft.setName("my_date");
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, true, null, null));
|
||||
true, true, null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, false, null, null));
|
||||
false, false, null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, true, null, null));
|
||||
false, true, null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, false, null, null));
|
||||
true, false, null, null, null));
|
||||
}
|
||||
|
||||
public void testIsFieldWithinQuery() throws IOException {
|
||||
|
@ -121,7 +116,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
// Fields with no value indexed.
|
||||
DateFieldType ft2 = new DateFieldType();
|
||||
ft2.setName("my_date2");
|
||||
assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null));
|
||||
assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, null));
|
||||
IOUtils.close(reader, w, dir);
|
||||
}
|
||||
|
||||
|
@ -146,7 +141,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
String date = "2015-10-12T12:09:55.000Z";
|
||||
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||
assertEquals(date, ft.valueForSearch(instant));
|
||||
assertEquals(date, ft.valueForDisplay(instant));
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
|
@ -172,11 +167,11 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||
long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date2).getMillis();
|
||||
ft.setIndexOptions(IndexOptions.DOCS);
|
||||
assertEquals(LongPoint.newRangeQuery("field", instant1, instant2),
|
||||
ft.rangeQuery(date1, date2, true, true).rewrite(new MultiReader()));
|
||||
ft.rangeQuery(date1, date2, true, true, null).rewrite(new MultiReader()));
|
||||
|
||||
ft.setIndexOptions(IndexOptions.NONE);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery(date1, date2, true, true));
|
||||
() -> ft.rangeQuery(date1, date2, true, true, null));
|
||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ public class IdFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("_id");
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null));
|
||||
assertEquals("Field [_id] of type [_id] does not support range queries", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,8 +25,6 @@ import org.apache.lucene.index.IndexOptions;
|
|||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.index.mapper.IpFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
||||
public class IpFieldTypeTests extends FieldTypeTestCase {
|
||||
@Override
|
||||
|
@ -49,11 +47,11 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
String ip = "2001:db8::2:1";
|
||||
BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip)));
|
||||
assertEquals(ip, ft.valueForSearch(asBytes));
|
||||
assertEquals(ip, ft.valueForDisplay(asBytes));
|
||||
|
||||
ip = "192.168.1.7";
|
||||
asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip)));
|
||||
assertEquals(ip, ft.valueForSearch(asBytes));
|
||||
assertEquals(ip, ft.valueForDisplay(asBytes));
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
|
@ -88,83 +86,83 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
|
|||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("::"),
|
||||
InetAddressPoint.MAX_VALUE),
|
||||
ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
||||
ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("::"),
|
||||
InetAddresses.forString("192.168.2.0")),
|
||||
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true));
|
||||
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true, null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("::"),
|
||||
InetAddresses.forString("192.168.1.255")),
|
||||
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false));
|
||||
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false, null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("2001:db8::"),
|
||||
InetAddressPoint.MAX_VALUE),
|
||||
ft.rangeQuery("2001:db8::", null, true, randomBoolean()));
|
||||
ft.rangeQuery("2001:db8::", null, true, randomBoolean(), null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("2001:db8::1"),
|
||||
InetAddressPoint.MAX_VALUE),
|
||||
ft.rangeQuery("2001:db8::", null, false, randomBoolean()));
|
||||
ft.rangeQuery("2001:db8::", null, false, randomBoolean(), null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("2001:db8::"),
|
||||
InetAddresses.forString("2001:db8::ffff")),
|
||||
ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true));
|
||||
ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true, null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("2001:db8::1"),
|
||||
InetAddresses.forString("2001:db8::fffe")),
|
||||
ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false));
|
||||
ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false, null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("2001:db8::2"),
|
||||
InetAddresses.forString("2001:db8::")),
|
||||
// same lo/hi values but inclusive=false so this won't match anything
|
||||
ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false));
|
||||
ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false, null));
|
||||
|
||||
// Upper bound is the min IP and is not inclusive
|
||||
assertEquals(new MatchNoDocsQuery(),
|
||||
ft.rangeQuery("::", "::", true, false));
|
||||
ft.rangeQuery("::", "::", true, false, null));
|
||||
|
||||
// Lower bound is the max IP and is not inclusive
|
||||
assertEquals(new MatchNoDocsQuery(),
|
||||
ft.rangeQuery("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true));
|
||||
ft.rangeQuery("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("::"),
|
||||
InetAddresses.forString("::fffe:ffff:ffff")),
|
||||
// same lo/hi values but inclusive=false so this won't match anything
|
||||
ft.rangeQuery("::", "0.0.0.0", true, false));
|
||||
ft.rangeQuery("::", "0.0.0.0", true, false, null));
|
||||
|
||||
assertEquals(
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("::1:0:0:0"),
|
||||
InetAddressPoint.MAX_VALUE),
|
||||
// same lo/hi values but inclusive=false so this won't match anything
|
||||
ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true));
|
||||
ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null));
|
||||
|
||||
assertEquals(
|
||||
// lower bound is ipv4, upper bound is ipv6
|
||||
InetAddressPoint.newRangeQuery("field",
|
||||
InetAddresses.forString("192.168.1.7"),
|
||||
InetAddresses.forString("2001:db8::")),
|
||||
ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true));
|
||||
ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null));
|
||||
|
||||
ft.setIndexOptions(IndexOptions.NONE);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery("::1", "2001::", true, true));
|
||||
() -> ft.rangeQuery("::1", "2001::", true, true, null));
|
||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -47,7 +47,7 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase {
|
|||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null,
|
||||
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
|
||||
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.LegacyByteFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
public class LegacyByteFieldTypeTests extends FieldTypeTestCase {
|
||||
|
@ -36,6 +34,6 @@ public class LegacyByteFieldTypeTests extends FieldTypeTestCase {
|
|||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
// bytes are stored as ints
|
||||
assertEquals(Byte.valueOf((byte) 3), ft.valueForSearch(Integer.valueOf(3)));
|
||||
assertEquals(Byte.valueOf((byte) 3), ft.valueForDisplay(Integer.valueOf(3)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -249,13 +249,8 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.bytes());
|
||||
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())));
|
||||
|
||||
LegacyNumericRangeQuery<Long> rangeQuery;
|
||||
try {
|
||||
SearchContext.setCurrent(new TestSearchContext(null));
|
||||
rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null);
|
||||
} finally {
|
||||
SearchContext.removeCurrent();
|
||||
}
|
||||
LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType()
|
||||
.rangeQuery("10:00:00", "11:00:00", true, true, null).rewrite(null);
|
||||
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(11).millis(), DateTimeZone.UTC).getMillis()));
|
||||
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()));
|
||||
}
|
||||
|
@ -275,13 +270,8 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||
.bytes());
|
||||
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())));
|
||||
|
||||
LegacyNumericRangeQuery<Long> rangeQuery;
|
||||
try {
|
||||
SearchContext.setCurrent(new TestSearchContext(null));
|
||||
rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null);
|
||||
} finally {
|
||||
SearchContext.removeCurrent();
|
||||
}
|
||||
LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType()
|
||||
.rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null).rewrite(null);
|
||||
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(35).millis(), DateTimeZone.UTC).getMillis()));
|
||||
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()));
|
||||
}
|
||||
|
|
|
@ -29,8 +29,6 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper.DateFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
|
@ -75,31 +73,31 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||
DateFieldType ft = new DateFieldType();
|
||||
ft.setName("my_date");
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, true, null, null));
|
||||
true, true, null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, false, null, null));
|
||||
false, false, null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
false, true, null, null));
|
||||
false, true, null, null, null));
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||
true, false, null, null));
|
||||
true, false, null, null, null));
|
||||
}
|
||||
|
||||
public void testIsFieldWithinQuery() throws IOException {
|
||||
|
@ -145,6 +143,6 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
String date = "2015-10-12T12:09:55.000Z";
|
||||
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||
assertEquals(date, ft.valueForSearch(instant));
|
||||
assertEquals(date, ft.valueForDisplay(instant));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.junit.Before;
|
||||
|
@ -41,11 +39,11 @@ public class LegacyDoubleFieldTypeTests extends FieldTypeTestCase {
|
|||
DoubleFieldType ft = new DoubleFieldType();
|
||||
// current impl ignores args and shourd always return INTERSECTS
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(),
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
assertEquals(Double.valueOf(1.2), ft.valueForSearch(1.2));
|
||||
assertEquals(Double.valueOf(1.2), ft.valueForDisplay(1.2));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.LegacyFloatFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.LegacyFloatFieldMapper.FloatFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.junit.Before;
|
||||
|
@ -41,11 +39,11 @@ public class LegacyFloatFieldTypeTests extends FieldTypeTestCase {
|
|||
FloatFieldType ft = new FloatFieldType();
|
||||
// current impl ignores args and shourd always return INTERSECTS
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomFloat(), randomFloat(),
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
assertEquals(Float.valueOf(1.2f), ft.valueForSearch(1.2f));
|
||||
assertEquals(Float.valueOf(1.2f), ft.valueForDisplay(1.2f));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper.IntegerFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.junit.Before;
|
||||
|
@ -41,11 +39,11 @@ public class LegacyIntegerFieldTypeTests extends FieldTypeTestCase {
|
|||
IntegerFieldType ft = new IntegerFieldType();
|
||||
// current impl ignores args and shourd always return INTERSECTS
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomInt(), randomInt(),
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
assertEquals(Integer.valueOf(3), ft.valueForSearch(Integer.valueOf(3)));
|
||||
assertEquals(Integer.valueOf(3), ft.valueForDisplay(Integer.valueOf(3)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.LegacyLongFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.LegacyLongFieldMapper.LongFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.junit.Before;
|
||||
|
@ -41,11 +39,11 @@ public class LegacyLongFieldTypeTests extends FieldTypeTestCase {
|
|||
LongFieldType ft = new LongFieldType();
|
||||
// current impl ignores args and shourd always return INTERSECTS
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomLong(), randomLong(),
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
assertEquals(Long.valueOf(3), ft.valueForSearch(Long.valueOf(3)));
|
||||
assertEquals(Long.valueOf(3), ft.valueForDisplay(Long.valueOf(3)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.LegacyShortFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.junit.Before;
|
||||
|
||||
public class LegacyShortFieldTypeTests extends FieldTypeTestCase {
|
||||
|
@ -36,6 +34,6 @@ public class LegacyShortFieldTypeTests extends FieldTypeTestCase {
|
|||
public void testValueForSearch() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
// shorts are stored as ints
|
||||
assertEquals(Short.valueOf((short) 3), ft.valueForSearch(Integer.valueOf(3)));
|
||||
assertEquals(Short.valueOf((short) 3), ft.valueForDisplay(Integer.valueOf(3)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
// current impl ignores args and should always return INTERSECTS
|
||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(),
|
||||
randomBoolean(), randomBoolean(), null, null));
|
||||
randomBoolean(), randomBoolean(), null, null, null));
|
||||
}
|
||||
|
||||
public void testTermQuery() {
|
||||
|
@ -78,11 +78,11 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
|
||||
ft.setName("field");
|
||||
ft.setIndexOptions(IndexOptions.DOCS);
|
||||
assertEquals(LongPoint.newRangeQuery("field", 1, 3), ft.rangeQuery("1", "3", true, true));
|
||||
assertEquals(LongPoint.newRangeQuery("field", 1, 3), ft.rangeQuery("1", "3", true, true, null));
|
||||
|
||||
ft.setIndexOptions(IndexOptions.NONE);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery("1", "3", true, true));
|
||||
() -> ft.rangeQuery("1", "3", true, true, null));
|
||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||
}
|
||||
|
||||
|
|
|
@ -38,9 +38,6 @@ import org.elasticsearch.index.IndexSettings;
|
|||
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ScaledFloatFieldMapper;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -122,7 +119,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
|
|||
boolean includeLower = randomBoolean();
|
||||
boolean includeUpper = randomBoolean();
|
||||
Query doubleQ = NumberFieldMapper.NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper);
|
||||
Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper);
|
||||
Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper, null);
|
||||
assertEquals(searcher.count(doubleQ), searcher.count(scaledFloatQ));
|
||||
}
|
||||
IOUtils.close(reader, dir);
|
||||
|
@ -132,8 +129,8 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
|
|||
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
|
||||
ft.setName("scaled_float");
|
||||
ft.setScalingFactor(0.1 + randomDouble() * 100);
|
||||
assertNull(ft.valueForSearch(null));
|
||||
assertEquals(10/ft.getScalingFactor(), ft.valueForSearch(10L));
|
||||
assertNull(ft.valueForDisplay(null));
|
||||
assertEquals(10/ft.getScalingFactor(), ft.valueForDisplay(10L));
|
||||
}
|
||||
|
||||
public void testStats() throws IOException {
|
||||
|
|
|
@ -18,10 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.TimestampFieldMapper;
|
||||
|
||||
public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests {
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
|
@ -33,6 +29,6 @@ public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
String date = "2015-10-12T12:09:55.000Z";
|
||||
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||
assertEquals(instant, ft.valueForSearch(instant));
|
||||
assertEquals(instant, ft.valueForDisplay(instant));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ public class UidFieldTypeTests extends FieldTypeTestCase {
|
|||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("_uid");
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null));
|
||||
assertEquals("Field [_uid] of type [_uid] does not support range queries", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.elasticsearch.common.bytes.BytesReference;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
|
@ -80,10 +81,11 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||
if (!queryBuilder.hasClauses()) {
|
||||
assertThat(query, instanceOf(MatchAllDocsQuery.class));
|
||||
} else {
|
||||
QueryShardContext context = searchContext.getQueryShardContext();
|
||||
List<BooleanClause> clauses = new ArrayList<>();
|
||||
clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context));
|
||||
clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context));
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -43,9 +44,9 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase<BoostingQue
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
Query positive = queryBuilder.positiveQuery().toQuery(context);
|
||||
Query negative = queryBuilder.negativeQuery().toQuery(context);
|
||||
protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Query positive = queryBuilder.positiveQuery().toQuery(context.getQueryShardContext());
|
||||
Query negative = queryBuilder.negativeQuery().toQuery(context.getQueryShardContext());
|
||||
if (positive == null || negative == null) {
|
||||
assertThat(query, nullValue());
|
||||
} else {
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -99,7 +100,7 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(ExtendedCommonTermsQuery.class));
|
||||
ExtendedCommonTermsQuery extendedCommonTermsQuery = (ExtendedCommonTermsQuery) query;
|
||||
assertThat(extendedCommonTermsQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.highFreqMinimumShouldMatch()));
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
|
|||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -45,8 +46,8 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(ConstantScoreQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
Query innerQuery = queryBuilder.innerQuery().toQuery(context);
|
||||
protected void doAssertLuceneQuery(ConstantScoreQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Query innerQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext());
|
||||
if (innerQuery == null) {
|
||||
assertThat(query, nullValue());
|
||||
} else {
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.apache.lucene.search.PrefixQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -57,8 +58,8 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
Collection<Query> queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context);
|
||||
protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Collection<Query> queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context.getQueryShardContext());
|
||||
assertThat(query, instanceOf(DisjunctionMaxQuery.class));
|
||||
DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query;
|
||||
assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker()));
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -55,9 +56,9 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase<ExistsQueryBu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
String fieldPattern = queryBuilder.fieldName();
|
||||
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
|
||||
Collection<String> fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern);
|
||||
if (getCurrentTypes().length == 0) {
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -43,16 +44,16 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase<Fie
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
String fieldInQuery = queryBuilder.fieldName();
|
||||
MappedFieldType fieldType = context.fieldMapper(fieldInQuery);
|
||||
MappedFieldType fieldType = context.getQueryShardContext().fieldMapper(fieldInQuery);
|
||||
if (fieldType != null) {
|
||||
fieldInQuery = fieldType.name();
|
||||
}
|
||||
assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
|
||||
FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query;
|
||||
assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery));
|
||||
assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context)));
|
||||
assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context.getQueryShardContext())));
|
||||
}
|
||||
|
||||
public void testIllegalArguments() {
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.FuzzyQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -72,7 +73,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(FuzzyQuery.class));
|
||||
}
|
||||
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.geo.GeoPoint;
|
|||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||
import org.locationtech.spatial4j.io.GeohashUtils;
|
||||
|
@ -254,8 +255,9 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, QueryShardContext context)
|
||||
protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, SearchContext searchContext)
|
||||
throws IOException {
|
||||
QueryShardContext context = searchContext.getQueryShardContext();
|
||||
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
||||
if (fieldType == null) {
|
||||
assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery);
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.document.LatLonPoint;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery;
|
||||
|
@ -31,6 +30,7 @@ import org.elasticsearch.common.geo.GeoUtils;
|
|||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
|
@ -130,8 +130,8 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
Version version = context.indexVersionCreated();
|
||||
protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Version version = context.getQueryShardContext().indexVersionCreated();
|
||||
if (version.before(Version.V_2_2_0)) {
|
||||
assertLegacyQuery(queryBuilder, query);
|
||||
} else {
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
|
|||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.elasticsearch.test.geo.RandomGeoGenerator;
|
||||
|
||||
|
@ -118,9 +119,9 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, QueryShardContext context)
|
||||
protected void doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, SearchContext context)
|
||||
throws IOException {
|
||||
Version version = context.indexVersionCreated();
|
||||
Version version = context.getQueryShardContext().indexVersionCreated();
|
||||
if (version.before(Version.V_2_2_0)) {
|
||||
assertLegacyQuery(queryBuilder, query);
|
||||
} else {
|
||||
|
|
|
@ -20,8 +20,6 @@
|
|||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import org.apache.lucene.document.LatLonPoint;
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery;
|
||||
|
@ -34,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
|
||||
|
@ -67,8 +66,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
Version version = context.indexVersionCreated();
|
||||
protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Version version = context.getQueryShardContext().indexVersionCreated();
|
||||
if (version.before(Version.V_2_2_0)) {
|
||||
assertLegacyQuery(queryBuilder, query);
|
||||
} else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
|
||||
|
|
|
@ -37,6 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
|
||||
|
@ -133,7 +134,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
// Logic for doToQuery is complex and is hard to test here. Need to rely
|
||||
// on Integration tests to determine if created query is correct
|
||||
// TODO improve GeoShapeQueryBuilder.doToQuery() method to make it
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
|
|||
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||
import org.elasticsearch.index.query.GeohashCellQuery.Builder;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
|
@ -64,7 +65,7 @@ public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase<Builder>
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(Builder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(Builder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
if (queryBuilder.neighbors()) {
|
||||
assertThat(query, instanceOf(TermsQuery.class));
|
||||
} else {
|
||||
|
|
|
@ -120,7 +120,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
||||
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
||||
assertEquals(queryBuilder.minChildren(), lpq.getMinChildren());
|
||||
|
@ -129,9 +129,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
|||
if (queryBuilder.innerHit() != null) {
|
||||
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
||||
// doCreateTestQueryBuilder)
|
||||
queryBuilder = (HasChildQueryBuilder) queryBuilder.rewrite(context);
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
assertNotNull(searchContext);
|
||||
queryBuilder = (HasChildQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||
for (InnerHitBuilder builder : innerHitBuilders.values()) {
|
||||
|
|
|
@ -100,7 +100,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
||||
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
||||
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
|
||||
|
@ -108,9 +108,8 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
|
|||
if (queryBuilder.innerHit() != null) {
|
||||
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
||||
// doCreateTestQueryBuilder)
|
||||
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(context);
|
||||
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
|
||||
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
assertNotNull(searchContext);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
|
|||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -80,7 +81,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
if (queryBuilder.ids().size() == 0) {
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
} else {
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -50,12 +51,12 @@ public class IndicesQueryBuilderTests extends AbstractQueryTestCase<IndicesQuery
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(IndicesQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(IndicesQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Query expected;
|
||||
if (queryBuilder.indices().length == 1 && getIndex().getName().equals(queryBuilder.indices()[0])) {
|
||||
expected = queryBuilder.innerQuery().toQuery(context);
|
||||
expected = queryBuilder.innerQuery().toQuery(context.getQueryShardContext());
|
||||
} else {
|
||||
expected = queryBuilder.noMatchQuery().toQuery(context);
|
||||
expected = queryBuilder.noMatchQuery().toQuery(context.getQueryShardContext());
|
||||
}
|
||||
assertEquals(expected, query);
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -35,7 +36,7 @@ public class MatchAllQueryBuilderTests extends AbstractQueryTestCase<MatchAllQue
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(MatchAllDocsQuery.class));
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -35,7 +36,7 @@ public class MatchNoneQueryBuilderTests extends AbstractQueryTestCase<MatchNoneQ
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchNoneQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(MatchNoneQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
}
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.apache.lucene.search.TermQuery;
|
|||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -88,7 +89,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, QueryShardContext context)
|
||||
protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, SearchContext context)
|
||||
throws IOException {
|
||||
assertThat(query, notNullValue());
|
||||
assertThat(query,
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -84,7 +85,7 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, notNullValue());
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
|
||||
|
|
|
@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.index.search.MatchQuery.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.hamcrest.Matcher;
|
||||
|
||||
|
@ -135,7 +136,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||
assertThat(query, notNullValue());
|
||||
|
||||
if (query instanceof MatchAllDocsQuery) {
|
||||
|
@ -160,7 +161,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||
.or(instanceOf(PointRangeQuery.class)));
|
||||
break;
|
||||
}
|
||||
|
||||
QueryShardContext context = searchContext.getQueryShardContext();
|
||||
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
||||
if (query instanceof TermQuery && fieldType != null) {
|
||||
String queryValue = queryBuilder.value().toString();
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
|
@ -243,7 +244,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
|
||||
assertThat(query, instanceOf(BooleanQuery.class));
|
||||
} else {
|
||||
|
|
|
@ -37,6 +37,7 @@ import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
|||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -142,7 +143,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
// we rely on integration tests for deeper checks here
|
||||
assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
|
||||
.or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
|
||||
|
|
|
@ -89,7 +89,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||
QueryBuilder innerQueryBuilder = queryBuilder.query();
|
||||
assertThat(query, instanceOf(ToParentBlockJoinQuery.class));
|
||||
ToParentBlockJoinQuery parentBlockJoinQuery = (ToParentBlockJoinQuery) query;
|
||||
|
@ -97,9 +97,8 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
|
|||
if (queryBuilder.innerHit() != null) {
|
||||
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
||||
// doCreateTestQueryBuilder)
|
||||
queryBuilder = (NestedQueryBuilder) queryBuilder.rewrite(context);
|
||||
queryBuilder = (NestedQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
|
||||
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
assertNotNull(searchContext);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
|||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.TypeFieldMapper;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
|
@ -69,7 +70,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, Matchers.instanceOf(BooleanQuery.class));
|
||||
BooleanQuery booleanQuery = (BooleanQuery) query;
|
||||
assertThat(booleanQuery.clauses().size(), Matchers.equalTo(2));
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.MultiTermQuery;
|
|||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -65,7 +66,7 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(PrefixQuery.class));
|
||||
PrefixQuery prefixQuery = (PrefixQuery) query;
|
||||
assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName()));
|
||||
|
|
|
@ -42,11 +42,12 @@ public class QueryRewriteContextTests extends ESTestCase {
|
|||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 1)
|
||||
);
|
||||
final long nowInMills = randomPositiveLong();
|
||||
IndicesQueriesRegistry indicesQueriesRegistry = new SearchModule(Settings.EMPTY, false, emptyList()).getQueryParserRegistry();
|
||||
IndexSettings indexSettings = new IndexSettings(indexMetadata.build(),
|
||||
Settings.builder().put(ScriptSettings.LEGACY_SCRIPT_SETTING, defaultLegacyScriptLanguage).build());
|
||||
QueryRewriteContext queryRewriteContext =
|
||||
new QueryRewriteContext(indexSettings, null, null, indicesQueriesRegistry, null, null, null);;
|
||||
new QueryRewriteContext(indexSettings, null, null, indicesQueriesRegistry, null, null, null, () -> nowInMills);
|
||||
|
||||
// verify that the default script language in the query parse context is equal to defaultLegacyScriptLanguage variable:
|
||||
QueryParseContext queryParseContext =
|
||||
|
|
|
@ -48,7 +48,7 @@ public class QueryShardContextTests extends ESTestCase {
|
|||
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
|
||||
final long nowInMillis = randomPositiveLong();
|
||||
QueryShardContext context = new QueryShardContext(
|
||||
indexSettings, null, null, mapperService, null, null, null, null, null, null,
|
||||
0, indexSettings, null, null, mapperService, null, null, null, null, null, null,
|
||||
() -> nowInMillis);
|
||||
|
||||
context.setAllowUnmappedFields(false);
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.apache.lucene.search.TermQuery;
|
|||
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
|
||||
import org.elasticsearch.common.lucene.all.AllTermQuery;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
@ -155,7 +156,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder,
|
||||
Query query, QueryShardContext context) throws IOException {
|
||||
Query query, SearchContext context) throws IOException {
|
||||
if ("".equals(queryBuilder.queryString())) {
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
} else {
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.elasticsearch.common.ParsingException;
|
|||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
@ -116,7 +117,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
|
||||
assertThat(query, instanceOf(TermRangeQuery.class));
|
||||
TermRangeQuery termRangeQuery = (TermRangeQuery) query;
|
||||
|
@ -328,7 +329,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
|
|||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
|
||||
QueryShardContext context = createShardContext();
|
||||
Query parsedQuery = parseQuery(query).toQuery(context).rewrite(null);
|
||||
if (parsedQuery instanceof PointRangeQuery) {
|
||||
// TODO what can we assert
|
||||
} else {
|
||||
|
@ -336,13 +338,13 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
|
|||
|
||||
// Min value was 2012-01-01 (UTC) so we need to remove one hour
|
||||
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
|
||||
// Max value is when we started the test. So it should be some ms from now
|
||||
DateTime max = new DateTime(startDate, DateTimeZone.UTC);
|
||||
// Max value is the nowInMillis set by the uery shard context
|
||||
long max = context.nowInMillis();
|
||||
|
||||
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
|
||||
|
||||
// We should not have a big difference here (should be some ms)
|
||||
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));
|
||||
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max, lessThanOrEqualTo(60000L));
|
||||
}
|
||||
|
||||
query = "{\n" +
|
||||
|
|
|
@ -36,8 +36,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||
public void testRewriteMissingField() throws Exception {
|
||||
IndexService indexService = createIndex("test");
|
||||
IndexReader reader = new MultiReader();
|
||||
QueryRewriteContext context = new QueryRewriteContext(indexService.getIndexSettings(),
|
||||
indexService.mapperService(), null, null, null, reader, null);
|
||||
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||
null, null, null, null, reader, null, null);
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
||||
}
|
||||
|
@ -53,8 +53,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||
.endObject().endObject().string();
|
||||
indexService.mapperService().merge("type",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||
QueryRewriteContext context = new QueryRewriteContext(indexService.getIndexSettings(),
|
||||
indexService.mapperService(), null, null, null, null, null);
|
||||
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||
null, null, null, null, null, null, null);
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||
// can't make assumptions on a missing reader, so it must return INTERSECT
|
||||
assertEquals(Relation.INTERSECTS, range.getRelation(context));
|
||||
|
@ -72,8 +72,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||
indexService.mapperService().merge("type",
|
||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||
IndexReader reader = new MultiReader();
|
||||
QueryRewriteContext context = new QueryRewriteContext(indexService.getIndexSettings(),
|
||||
indexService.mapperService(), null, null, null, reader, null);
|
||||
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||
null, null, null, null, reader, null, null);
|
||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||
// no values -> DISJOINT
|
||||
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -76,7 +77,7 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(RegexpQuery.class));
|
||||
RegexpQuery regexpQuery = (RegexpQuery) query;
|
||||
assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName()));
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.elasticsearch.script.MockScriptEngine;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -41,7 +42,7 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase<ScriptQueryBu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(ScriptQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(ScriptQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(ScriptQueryBuilder.ScriptQuery.class));
|
||||
}
|
||||
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -241,7 +242,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||
* actual functionality of query parsing.
|
||||
*/
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, notNullValue());
|
||||
|
||||
if ("".equals(queryBuilder.value())) {
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanContainingQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -35,7 +36,7 @@ public class SpanContainingQueryBuilderTests extends AbstractQueryTestCase<SpanC
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(SpanContainingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(SpanContainingQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(SpanContainingQuery.class));
|
||||
}
|
||||
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.spans.SpanFirstQuery;
|
|||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -39,7 +40,7 @@ public class SpanFirstQueryBuilderTests extends AbstractQueryTestCase<SpanFirstQ
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(SpanFirstQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(SpanFirstQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
assertThat(query, instanceOf(SpanFirstQuery.class));
|
||||
}
|
||||
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.spans.SpanBoostQuery;
|
||||
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -41,7 +42,7 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
|
|||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
|
||||
assertThat(query, instanceOf(SpanBoostQuery.class));
|
||||
SpanBoostQuery boostQuery = (SpanBoostQuery) query;
|
||||
|
@ -50,7 +51,7 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
|
|||
}
|
||||
assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class));
|
||||
SpanMultiTermQueryWrapper spanMultiTermQueryWrapper = (SpanMultiTermQueryWrapper) query;
|
||||
Query multiTermQuery = queryBuilder.innerQuery().toQuery(context);
|
||||
Query multiTermQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext());
|
||||
if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
|
||||
assertThat(multiTermQuery, instanceOf(BoostQuery.class));
|
||||
BoostQuery boostQuery = (BoostQuery) multiTermQuery;
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue