mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-02-08 22:14:59 +00:00
Remove SearchContext#current and all its threadlocals (#20778)
Today SearchContext exposes the current context as a thread local, which makes any kind of sane interface design very hard. This PR removes the thread local entirely and instead passes the relevant context wherever it is needed. This simplifies state management dramatically and will allow for a much leaner SearchContext interface down the road.
This commit is contained in:
parent
d7d5df8863
commit
9c9afe3f01
@ -180,17 +180,17 @@ public class MapperQueryParser extends QueryParser {
|
|||||||
if (queryText.charAt(0) == '>') {
|
if (queryText.charAt(0) == '>') {
|
||||||
if (queryText.length() > 2) {
|
if (queryText.length() > 2) {
|
||||||
if (queryText.charAt(1) == '=') {
|
if (queryText.charAt(1) == '=') {
|
||||||
return getRangeQuerySingle(field, queryText.substring(2), null, true, true);
|
return getRangeQuerySingle(field, queryText.substring(2), null, true, true, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return getRangeQuerySingle(field, queryText.substring(1), null, false, true);
|
return getRangeQuerySingle(field, queryText.substring(1), null, false, true, context);
|
||||||
} else if (queryText.charAt(0) == '<') {
|
} else if (queryText.charAt(0) == '<') {
|
||||||
if (queryText.length() > 2) {
|
if (queryText.length() > 2) {
|
||||||
if (queryText.charAt(1) == '=') {
|
if (queryText.charAt(1) == '=') {
|
||||||
return getRangeQuerySingle(field, null, queryText.substring(2), true, true);
|
return getRangeQuerySingle(field, null, queryText.substring(2), true, true, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
|
return getRangeQuerySingle(field, null, queryText.substring(1), true, false, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
currentFieldType = null;
|
currentFieldType = null;
|
||||||
@ -290,19 +290,19 @@ public class MapperQueryParser extends QueryParser {
|
|||||||
Collection<String> fields = extractMultiFields(field);
|
Collection<String> fields = extractMultiFields(field);
|
||||||
|
|
||||||
if (fields == null) {
|
if (fields == null) {
|
||||||
return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive);
|
return getRangeQuerySingle(field, part1, part2, startInclusive, endInclusive, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
if (fields.size() == 1) {
|
if (fields.size() == 1) {
|
||||||
return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive);
|
return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (settings.useDisMax()) {
|
if (settings.useDisMax()) {
|
||||||
List<Query> queries = new ArrayList<>();
|
List<Query> queries = new ArrayList<>();
|
||||||
boolean added = false;
|
boolean added = false;
|
||||||
for (String mField : fields) {
|
for (String mField : fields) {
|
||||||
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
|
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context);
|
||||||
if (q != null) {
|
if (q != null) {
|
||||||
added = true;
|
added = true;
|
||||||
queries.add(applyBoost(mField, q));
|
queries.add(applyBoost(mField, q));
|
||||||
@ -315,7 +315,7 @@ public class MapperQueryParser extends QueryParser {
|
|||||||
} else {
|
} else {
|
||||||
List<BooleanClause> clauses = new ArrayList<>();
|
List<BooleanClause> clauses = new ArrayList<>();
|
||||||
for (String mField : fields) {
|
for (String mField : fields) {
|
||||||
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
|
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context);
|
||||||
if (q != null) {
|
if (q != null) {
|
||||||
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
|
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
|
||||||
}
|
}
|
||||||
@ -326,7 +326,7 @@ public class MapperQueryParser extends QueryParser {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private Query getRangeQuerySingle(String field, String part1, String part2,
|
private Query getRangeQuerySingle(String field, String part1, String part2,
|
||||||
boolean startInclusive, boolean endInclusive) {
|
boolean startInclusive, boolean endInclusive, QueryShardContext context) {
|
||||||
currentFieldType = context.fieldMapper(field);
|
currentFieldType = context.fieldMapper(field);
|
||||||
if (currentFieldType != null) {
|
if (currentFieldType != null) {
|
||||||
if (lowercaseExpandedTerms && currentFieldType.tokenized()) {
|
if (lowercaseExpandedTerms && currentFieldType.tokenized()) {
|
||||||
@ -338,12 +338,12 @@ public class MapperQueryParser extends QueryParser {
|
|||||||
Query rangeQuery;
|
Query rangeQuery;
|
||||||
if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) {
|
if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) {
|
||||||
LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType;
|
LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType;
|
||||||
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
|
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, context);
|
||||||
} else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
|
} else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
|
||||||
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
|
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
|
||||||
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
|
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, context);
|
||||||
} else {
|
} else {
|
||||||
rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive);
|
rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, context);
|
||||||
}
|
}
|
||||||
return rangeQuery;
|
return rangeQuery;
|
||||||
} catch (RuntimeException e) {
|
} catch (RuntimeException e) {
|
||||||
|
@ -152,7 +152,6 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
|
|||||||
ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(request.shardId(), request.types(),
|
ShardSearchLocalRequest shardSearchLocalRequest = new ShardSearchLocalRequest(request.shardId(), request.types(),
|
||||||
request.nowInMillis(), request.filteringAliases());
|
request.nowInMillis(), request.filteringAliases());
|
||||||
SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
|
SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
|
||||||
SearchContext.setCurrent(searchContext);
|
|
||||||
try {
|
try {
|
||||||
ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query());
|
ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query());
|
||||||
searchContext.parsedQuery(parsedQuery);
|
searchContext.parsedQuery(parsedQuery);
|
||||||
@ -166,7 +165,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
|
|||||||
valid = false;
|
valid = false;
|
||||||
error = e.getMessage();
|
error = e.getMessage();
|
||||||
} finally {
|
} finally {
|
||||||
Releasables.close(searchContext, () -> SearchContext.removeCurrent());
|
Releasables.close(searchContext);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
|
return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
|
||||||
|
@ -91,7 +91,6 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
|
|||||||
new String[]{request.type()}, request.nowInMillis, request.filteringAlias());
|
new String[]{request.type()}, request.nowInMillis, request.filteringAlias());
|
||||||
SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
|
SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT, null);
|
||||||
Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
|
Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
|
||||||
SearchContext.setCurrent(context);
|
|
||||||
Engine.GetResult result = null;
|
Engine.GetResult result = null;
|
||||||
try {
|
try {
|
||||||
result = context.indexShard().get(new Engine.Get(false, uidTerm));
|
result = context.indexShard().get(new Engine.Get(false, uidTerm));
|
||||||
@ -118,7 +117,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
|
|||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new ElasticsearchException("Could not explain", e);
|
throw new ElasticsearchException("Could not explain", e);
|
||||||
} finally {
|
} finally {
|
||||||
Releasables.close(result, context, () -> SearchContext.removeCurrent());
|
Releasables.close(result, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -449,9 +449,9 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||||||
* Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
|
* Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
|
||||||
* {@link QueryShardContext#setTypes(String...)}
|
* {@link QueryShardContext#setTypes(String...)}
|
||||||
*/
|
*/
|
||||||
public QueryShardContext newQueryShardContext(IndexReader indexReader, LongSupplier nowInMillis) {
|
public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) {
|
||||||
return new QueryShardContext(
|
return new QueryShardContext(
|
||||||
indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
|
shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(),
|
||||||
similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(),
|
similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry(),
|
||||||
nodeServicesProvider.getClient(), indexReader,
|
nodeServicesProvider.getClient(), indexReader,
|
||||||
nodeServicesProvider.getClusterService().state(),
|
nodeServicesProvider.getClusterService().state(),
|
||||||
@ -464,7 +464,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
|
|||||||
* used for rewriting since it does not know about the current {@link IndexReader}.
|
* used for rewriting since it does not know about the current {@link IndexReader}.
|
||||||
*/
|
*/
|
||||||
public QueryShardContext newQueryShardContext() {
|
public QueryShardContext newQueryShardContext() {
|
||||||
return newQueryShardContext(null, threadPool::estimatedTimeInMillis);
|
return newQueryShardContext(0, null, threadPool::estimatedTimeInMillis);
|
||||||
}
|
}
|
||||||
|
|
||||||
public ThreadPool getThreadPool() {
|
public ThreadPool getThreadPool() {
|
||||||
|
@ -90,7 +90,7 @@ public class FieldsVisitor extends StoredFieldVisitor {
|
|||||||
}
|
}
|
||||||
List<Object> fieldValues = entry.getValue();
|
List<Object> fieldValues = entry.getValue();
|
||||||
for (int i = 0; i < fieldValues.size(); i++) {
|
for (int i = 0; i < fieldValues.size(); i++) {
|
||||||
fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
|
fieldValues.set(i, fieldType.valueForDisplay(fieldValues.get(i)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -79,7 +79,7 @@ public class SingleFieldsVisitor extends FieldsVisitor {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
for (int i = 0; i < fieldValues.size(); i++) {
|
for (int i = 0; i < fieldValues.size(); i++) {
|
||||||
fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
|
fieldValues.set(i, fieldType.valueForDisplay(fieldValues.get(i)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -104,7 +104,7 @@ public class BinaryFieldMapper extends FieldMapper {
|
|||||||
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BytesReference valueForSearch(Object value) {
|
public BytesReference valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
import org.elasticsearch.search.DocValueFormat;
|
import org.elasticsearch.search.DocValueFormat;
|
||||||
import org.joda.time.DateTimeZone;
|
import org.joda.time.DateTimeZone;
|
||||||
|
|
||||||
@ -164,7 +165,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Boolean valueForSearch(Object value) {
|
public Boolean valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -197,7 +198,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
return new TermRangeQuery(name(),
|
return new TermRangeQuery(name(),
|
||||||
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
||||||
|
@ -43,9 +43,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
|||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults;
|
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults;
|
||||||
|
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||||
import org.elasticsearch.index.query.QueryShardContext;
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
import org.elasticsearch.search.DocValueFormat;
|
import org.elasticsearch.search.DocValueFormat;
|
||||||
import org.elasticsearch.search.internal.SearchContext;
|
|
||||||
import org.joda.time.DateTimeZone;
|
import org.joda.time.DateTimeZone;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -172,15 +172,17 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
final boolean includeUpper;
|
final boolean includeUpper;
|
||||||
final DateTimeZone timeZone;
|
final DateTimeZone timeZone;
|
||||||
final DateMathParser forcedDateParser;
|
final DateMathParser forcedDateParser;
|
||||||
|
private QueryShardContext queryShardContext;
|
||||||
|
|
||||||
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||||
DateTimeZone timeZone, DateMathParser forcedDateParser) {
|
DateTimeZone timeZone, DateMathParser forcedDateParser, QueryShardContext queryShardContext) {
|
||||||
this.lowerTerm = lowerTerm;
|
this.lowerTerm = lowerTerm;
|
||||||
this.upperTerm = upperTerm;
|
this.upperTerm = upperTerm;
|
||||||
this.includeLower = includeLower;
|
this.includeLower = includeLower;
|
||||||
this.includeUpper = includeUpper;
|
this.includeUpper = includeUpper;
|
||||||
this.timeZone = timeZone;
|
this.timeZone = timeZone;
|
||||||
this.forcedDateParser = forcedDateParser;
|
this.forcedDateParser = forcedDateParser;
|
||||||
|
this.queryShardContext = queryShardContext;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -189,7 +191,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
if (rewritten != this) {
|
if (rewritten != this) {
|
||||||
return rewritten;
|
return rewritten;
|
||||||
}
|
}
|
||||||
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, queryShardContext);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
|
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
|
||||||
@ -301,7 +303,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
||||||
Query query = innerRangeQuery(value, value, true, true, null, null);
|
Query query = innerRangeQuery(value, value, true, true, null, null, context);
|
||||||
if (boost() != 1f) {
|
if (boost() != 1f) {
|
||||||
query = new BoostQuery(query, boost());
|
query = new BoostQuery(query, boost());
|
||||||
}
|
}
|
||||||
@ -309,19 +311,19 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null);
|
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||||
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
DateMathParser parser = forcedDateParser == null
|
DateMathParser parser = forcedDateParser == null
|
||||||
? dateMathParser
|
? dateMathParser
|
||||||
@ -330,7 +332,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
if (lowerTerm == null) {
|
if (lowerTerm == null) {
|
||||||
l = Long.MIN_VALUE;
|
l = Long.MIN_VALUE;
|
||||||
} else {
|
} else {
|
||||||
l = parseToMilliseconds(lowerTerm, !includeLower, timeZone, parser);
|
l = parseToMilliseconds(lowerTerm, !includeLower, timeZone, parser, context);
|
||||||
if (includeLower == false) {
|
if (includeLower == false) {
|
||||||
++l;
|
++l;
|
||||||
}
|
}
|
||||||
@ -338,7 +340,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
if (upperTerm == null) {
|
if (upperTerm == null) {
|
||||||
u = Long.MAX_VALUE;
|
u = Long.MAX_VALUE;
|
||||||
} else {
|
} else {
|
||||||
u = parseToMilliseconds(upperTerm, includeUpper, timeZone, parser);
|
u = parseToMilliseconds(upperTerm, includeUpper, timeZone, parser, context);
|
||||||
if (includeUpper == false) {
|
if (includeUpper == false) {
|
||||||
--u;
|
--u;
|
||||||
}
|
}
|
||||||
@ -347,7 +349,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public long parseToMilliseconds(Object value, boolean roundUp,
|
public long parseToMilliseconds(Object value, boolean roundUp,
|
||||||
@Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
|
@Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
|
||||||
DateMathParser dateParser = dateMathParser();
|
DateMathParser dateParser = dateMathParser();
|
||||||
if (forcedDateParser != null) {
|
if (forcedDateParser != null) {
|
||||||
dateParser = forcedDateParser;
|
dateParser = forcedDateParser;
|
||||||
@ -359,14 +361,13 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
} else {
|
} else {
|
||||||
strValue = value.toString();
|
strValue = value.toString();
|
||||||
}
|
}
|
||||||
return dateParser.parse(strValue, now(), roundUp, zone);
|
return dateParser.parse(strValue, now(context), roundUp, zone);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Callable<Long> now() {
|
private static Callable<Long> now(QueryRewriteContext context) {
|
||||||
return () -> {
|
return () -> {
|
||||||
final SearchContext context = SearchContext.current();
|
|
||||||
return context != null
|
return context != null
|
||||||
? context.getQueryShardContext().nowInMillis()
|
? context.nowInMillis()
|
||||||
: System.currentTimeMillis();
|
: System.currentTimeMillis();
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -390,7 +391,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
public Relation isFieldWithinQuery(IndexReader reader,
|
public Relation isFieldWithinQuery(IndexReader reader,
|
||||||
Object from, Object to,
|
Object from, Object to,
|
||||||
boolean includeLower, boolean includeUpper,
|
boolean includeLower, boolean includeUpper,
|
||||||
DateTimeZone timeZone, DateMathParser dateParser) throws IOException {
|
DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
|
||||||
if (dateParser == null) {
|
if (dateParser == null) {
|
||||||
dateParser = this.dateMathParser;
|
dateParser = this.dateMathParser;
|
||||||
}
|
}
|
||||||
@ -405,7 +406,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
|
|
||||||
long fromInclusive = Long.MIN_VALUE;
|
long fromInclusive = Long.MIN_VALUE;
|
||||||
if (from != null) {
|
if (from != null) {
|
||||||
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser);
|
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
|
||||||
if (includeLower == false) {
|
if (includeLower == false) {
|
||||||
if (fromInclusive == Long.MAX_VALUE) {
|
if (fromInclusive == Long.MAX_VALUE) {
|
||||||
return Relation.DISJOINT;
|
return Relation.DISJOINT;
|
||||||
@ -416,7 +417,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
|
|
||||||
long toInclusive = Long.MAX_VALUE;
|
long toInclusive = Long.MAX_VALUE;
|
||||||
if (to != null) {
|
if (to != null) {
|
||||||
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser);
|
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
|
||||||
if (includeUpper == false) {
|
if (includeUpper == false) {
|
||||||
if (toInclusive == Long.MIN_VALUE) {
|
if (toInclusive == Long.MIN_VALUE) {
|
||||||
return Relation.DISJOINT;
|
return Relation.DISJOINT;
|
||||||
@ -441,7 +442,7 @@ public class DateFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
Long val = (Long) value;
|
Long val = (Long) value;
|
||||||
if (val == null) {
|
if (val == null) {
|
||||||
return null;
|
return null;
|
||||||
|
@ -178,7 +178,7 @@ public class IpFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
InetAddress lower;
|
InetAddress lower;
|
||||||
if (lowerTerm == null) {
|
if (lowerTerm == null) {
|
||||||
@ -231,7 +231,7 @@ public class IpFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@ -134,7 +134,7 @@ public final class KeywordFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
node.put("index", index);
|
node.put("index", index);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new StringFieldMapper.TypeParser().parse(name, node, parserContext);
|
return new StringFieldMapper.TypeParser().parse(name, node, parserContext);
|
||||||
}
|
}
|
||||||
KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name);
|
KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name);
|
||||||
@ -196,7 +196,7 @@ public final class KeywordFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@ -38,6 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
@ -131,7 +132,7 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Byte valueForSearch(Object value) {
|
public Byte valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -146,7 +147,7 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : (int)parseValue(lowerTerm),
|
lowerTerm == null ? null : (int)parseValue(lowerTerm),
|
||||||
upperTerm == null ? null : (int)parseValue(upperTerm),
|
upperTerm == null ? null : (int)parseValue(upperTerm),
|
||||||
|
@ -43,8 +43,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
|||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField;
|
import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField;
|
||||||
|
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
import org.elasticsearch.search.DocValueFormat;
|
import org.elasticsearch.search.DocValueFormat;
|
||||||
import org.elasticsearch.search.internal.SearchContext;
|
|
||||||
import org.joda.time.DateTimeZone;
|
import org.joda.time.DateTimeZone;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -184,14 +185,17 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
final boolean includeUpper;
|
final boolean includeUpper;
|
||||||
final DateTimeZone timeZone;
|
final DateTimeZone timeZone;
|
||||||
final DateMathParser forcedDateParser;
|
final DateMathParser forcedDateParser;
|
||||||
|
private QueryShardContext context;
|
||||||
|
|
||||||
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) {
|
public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone,
|
||||||
|
DateMathParser forcedDateParser, QueryShardContext context) {
|
||||||
this.lowerTerm = lowerTerm;
|
this.lowerTerm = lowerTerm;
|
||||||
this.upperTerm = upperTerm;
|
this.upperTerm = upperTerm;
|
||||||
this.includeLower = includeLower;
|
this.includeLower = includeLower;
|
||||||
this.includeUpper = includeUpper;
|
this.includeUpper = includeUpper;
|
||||||
this.timeZone = timeZone;
|
this.timeZone = timeZone;
|
||||||
this.forcedDateParser = forcedDateParser;
|
this.forcedDateParser = forcedDateParser;
|
||||||
|
this.context = context;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -200,7 +204,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
if (rewritten != this) {
|
if (rewritten != this) {
|
||||||
return rewritten;
|
return rewritten;
|
||||||
}
|
}
|
||||||
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
|
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
|
||||||
@ -339,7 +343,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
Long val = (Long) value;
|
Long val = (Long) value;
|
||||||
if (val == null) {
|
if (val == null) {
|
||||||
return null;
|
return null;
|
||||||
@ -348,8 +352,8 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null);
|
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -366,14 +370,20 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
dateTimeFormatter(), minValue, maxValue);
|
dateTimeFormatter(), minValue, maxValue);
|
||||||
}
|
}
|
||||||
|
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||||
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
|
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) {
|
||||||
|
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
|
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper,
|
||||||
|
@Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
|
||||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
|
lowerTerm == null ? null
|
||||||
upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
|
: parseToMilliseconds(lowerTerm, !includeLower, timeZone,
|
||||||
|
forcedDateParser == null ? dateMathParser : forcedDateParser, context),
|
||||||
|
upperTerm == null ? null
|
||||||
|
: parseToMilliseconds(upperTerm, includeUpper, timeZone,
|
||||||
|
forcedDateParser == null ? dateMathParser : forcedDateParser, context),
|
||||||
includeLower, includeUpper);
|
includeLower, includeUpper);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -381,7 +391,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
public Relation isFieldWithinQuery(IndexReader reader,
|
public Relation isFieldWithinQuery(IndexReader reader,
|
||||||
Object from, Object to,
|
Object from, Object to,
|
||||||
boolean includeLower, boolean includeUpper,
|
boolean includeLower, boolean includeUpper,
|
||||||
DateTimeZone timeZone, DateMathParser dateParser) throws IOException {
|
DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException {
|
||||||
if (dateParser == null) {
|
if (dateParser == null) {
|
||||||
dateParser = this.dateMathParser;
|
dateParser = this.dateMathParser;
|
||||||
}
|
}
|
||||||
@ -397,7 +407,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
|
|
||||||
long fromInclusive = Long.MIN_VALUE;
|
long fromInclusive = Long.MIN_VALUE;
|
||||||
if (from != null) {
|
if (from != null) {
|
||||||
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser);
|
fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context);
|
||||||
if (includeLower == false) {
|
if (includeLower == false) {
|
||||||
if (fromInclusive == Long.MAX_VALUE) {
|
if (fromInclusive == Long.MAX_VALUE) {
|
||||||
return Relation.DISJOINT;
|
return Relation.DISJOINT;
|
||||||
@ -408,7 +418,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
|
|
||||||
long toInclusive = Long.MAX_VALUE;
|
long toInclusive = Long.MAX_VALUE;
|
||||||
if (to != null) {
|
if (to != null) {
|
||||||
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser);
|
toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context);
|
||||||
if (includeUpper == false) {
|
if (includeUpper == false) {
|
||||||
if (toInclusive == Long.MIN_VALUE) {
|
if (toInclusive == Long.MIN_VALUE) {
|
||||||
return Relation.DISJOINT;
|
return Relation.DISJOINT;
|
||||||
@ -426,7 +436,8 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) {
|
public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone,
|
||||||
|
@Nullable DateMathParser forcedDateParser, QueryRewriteContext context) {
|
||||||
if (value instanceof Long) {
|
if (value instanceof Long) {
|
||||||
return ((Long) value).longValue();
|
return ((Long) value).longValue();
|
||||||
}
|
}
|
||||||
@ -442,7 +453,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
} else {
|
} else {
|
||||||
strValue = value.toString();
|
strValue = value.toString();
|
||||||
}
|
}
|
||||||
return dateParser.parse(strValue, now(), inclusive, zone);
|
return dateParser.parse(strValue, now(context), inclusive, zone);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -474,13 +485,12 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
return (DateFieldType) super.fieldType();
|
return (DateFieldType) super.fieldType();
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Callable<Long> now() {
|
private static Callable<Long> now(QueryRewriteContext context) {
|
||||||
return new Callable<Long>() {
|
return new Callable<Long>() {
|
||||||
@Override
|
@Override
|
||||||
public Long call() {
|
public Long call() {
|
||||||
final SearchContext context = SearchContext.current();
|
|
||||||
return context != null
|
return context != null
|
||||||
? context.getQueryShardContext().nowInMillis()
|
? context.nowInMillis()
|
||||||
: System.currentTimeMillis();
|
: System.currentTimeMillis();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
@ -135,7 +136,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public java.lang.Double valueForSearch(Object value) {
|
public java.lang.Double valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -157,7 +158,7 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : parseDoubleValue(lowerTerm),
|
lowerTerm == null ? null : parseDoubleValue(lowerTerm),
|
||||||
upperTerm == null ? null : parseDoubleValue(upperTerm),
|
upperTerm == null ? null : parseDoubleValue(upperTerm),
|
||||||
|
@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
@ -142,7 +143,7 @@ public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||||
upperTerm == null ? null : parseValue(upperTerm),
|
upperTerm == null ? null : parseValue(upperTerm),
|
||||||
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
@ -145,7 +146,7 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||||
upperTerm == null ? null : parseValue(upperTerm),
|
upperTerm == null ? null : parseValue(upperTerm),
|
||||||
|
@ -171,7 +171,7 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
* IPs should return as a string.
|
* IPs should return as a string.
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
Long val = (Long) value;
|
Long val = (Long) value;
|
||||||
if (val == null) {
|
if (val == null) {
|
||||||
return null;
|
return null;
|
||||||
@ -210,14 +210,14 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
if (fromTo != null) {
|
if (fromTo != null) {
|
||||||
return rangeQuery(fromTo[0] == 0 ? null : fromTo[0],
|
return rangeQuery(fromTo[0] == 0 ? null : fromTo[0],
|
||||||
fromTo[1] == MAX_IP ? null : fromTo[1], true, false);
|
fromTo[1] == MAX_IP ? null : fromTo[1], true, false, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return super.termQuery(value, context);
|
return super.termQuery(value, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : parseValue(lowerTerm),
|
lowerTerm == null ? null : parseValue(lowerTerm),
|
||||||
upperTerm == null ? null : parseValue(upperTerm),
|
upperTerm == null ? null : parseValue(upperTerm),
|
||||||
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
@ -146,7 +147,7 @@ public class LegacyLongFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : parseLongValue(lowerTerm),
|
lowerTerm == null ? null : parseLongValue(lowerTerm),
|
||||||
upperTerm == null ? null : parseLongValue(upperTerm),
|
upperTerm == null ? null : parseLongValue(upperTerm),
|
||||||
|
@ -39,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
|
||||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
@ -135,7 +136,7 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Short valueForSearch(Object value) {
|
public Short valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
@ -150,7 +151,7 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||||
lowerTerm == null ? null : (int)parseValue(lowerTerm),
|
lowerTerm == null ? null : (int)parseValue(lowerTerm),
|
||||||
upperTerm == null ? null : (int)parseValue(upperTerm),
|
upperTerm == null ? null : (int)parseValue(upperTerm),
|
||||||
|
@ -38,6 +38,7 @@ import org.elasticsearch.common.joda.DateMathParser;
|
|||||||
import org.elasticsearch.common.unit.Fuzziness;
|
import org.elasticsearch.common.unit.Fuzziness;
|
||||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||||
|
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||||
import org.elasticsearch.index.query.QueryShardContext;
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
import org.elasticsearch.index.query.QueryShardException;
|
import org.elasticsearch.index.query.QueryShardException;
|
||||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||||
@ -303,7 +304,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||||||
/** Given a value that comes from the stored fields API, convert it to the
|
/** Given a value that comes from the stored fields API, convert it to the
|
||||||
* expected type. For instance a date field would store dates as longs and
|
* expected type. For instance a date field would store dates as longs and
|
||||||
* format it back to a string in this method. */
|
* format it back to a string in this method. */
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -343,7 +344,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||||||
return new ConstantScoreQuery(builder.build());
|
return new ConstantScoreQuery(builder.build());
|
||||||
}
|
}
|
||||||
|
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
|
throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -399,10 +400,10 @@ public abstract class MappedFieldType extends FieldType {
|
|||||||
* {@link Relation#INTERSECTS}, which is always fine to return when there is
|
* {@link Relation#INTERSECTS}, which is always fine to return when there is
|
||||||
* no way to check whether values are actually within bounds. */
|
* no way to check whether values are actually within bounds. */
|
||||||
public Relation isFieldWithinQuery(
|
public Relation isFieldWithinQuery(
|
||||||
IndexReader reader,
|
IndexReader reader,
|
||||||
Object from, Object to,
|
Object from, Object to,
|
||||||
boolean includeLower, boolean includeUpper,
|
boolean includeLower, boolean includeUpper,
|
||||||
DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException {
|
DateTimeZone timeZone, DateMathParser dateMathParser, QueryRewriteContext context) throws IOException {
|
||||||
return Relation.INTERSECTS;
|
return Relation.INTERSECTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -54,6 +54,7 @@ import java.util.List;
|
|||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.function.Function;
|
import java.util.function.Function;
|
||||||
|
import java.util.function.LongSupplier;
|
||||||
import java.util.function.Supplier;
|
import java.util.function.Supplier;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
@ -653,4 +654,5 @@ public class MapperService extends AbstractIndexComponent {
|
|||||||
return defaultAnalyzer;
|
return defaultAnalyzer;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -815,7 +815,7 @@ public class NumberFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
Query query = type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper);
|
Query query = type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper);
|
||||||
if (boost() != 1f) {
|
if (boost() != 1f) {
|
||||||
@ -836,7 +836,7 @@ public class NumberFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@ -233,7 +233,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
Long lo = null;
|
Long lo = null;
|
||||||
if (lowerTerm != null) {
|
if (lowerTerm != null) {
|
||||||
@ -288,7 +288,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
if (value == null) {
|
if (value == null) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
@ -46,6 +46,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
|
|||||||
super(ref);
|
super(ref);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
public Query termsQuery(List<?> values, QueryShardContext context) {
|
public Query termsQuery(List<?> values, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
BytesRef[] bytesRefs = new BytesRef[values.size()];
|
BytesRef[] bytesRefs = new BytesRef[values.size()];
|
||||||
@ -85,7 +86,7 @@ public abstract class StringFieldType extends TermBasedFieldType {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
|
||||||
failIfNotIndexed();
|
failIfNotIndexed();
|
||||||
return new TermRangeQuery(name(),
|
return new TermRangeQuery(name(),
|
||||||
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
||||||
|
@ -28,7 +28,6 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
import org.elasticsearch.index.AlreadyExpiredException;
|
import org.elasticsearch.index.AlreadyExpiredException;
|
||||||
import org.elasticsearch.search.internal.SearchContext;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
@ -139,15 +138,9 @@ public class TTLFieldMapper extends MetadataFieldMapper {
|
|||||||
|
|
||||||
// Overrides valueForSearch to display live value of remaining ttl
|
// Overrides valueForSearch to display live value of remaining ttl
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
long now;
|
final long now = System.currentTimeMillis();
|
||||||
SearchContext searchContext = SearchContext.current();
|
Long val = (Long) super.valueForDisplay(value);
|
||||||
if (searchContext != null) {
|
|
||||||
now = searchContext.getQueryShardContext().nowInMillis();
|
|
||||||
} else {
|
|
||||||
now = System.currentTimeMillis();
|
|
||||||
}
|
|
||||||
Long val = (Long) super.valueForSearch(value);
|
|
||||||
return val - now;
|
return val - now;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -177,11 +170,6 @@ public class TTLFieldMapper extends MetadataFieldMapper {
|
|||||||
return this.defaultTTL;
|
return this.defaultTTL;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Other implementation for realtime get display
|
|
||||||
public Object valueForSearch(long expirationTime) {
|
|
||||||
return expirationTime - System.currentTimeMillis();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void preParse(ParseContext context) throws IOException {
|
public void preParse(ParseContext context) throws IOException {
|
||||||
}
|
}
|
||||||
|
@ -179,7 +179,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Object valueForSearch(Object value) {
|
public Object valueForDisplay(Object value) {
|
||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -377,7 +377,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
|
|||||||
switch(type) {
|
switch(type) {
|
||||||
case INDEXED:
|
case INDEXED:
|
||||||
LegacyGeoPointFieldType geoFieldType = ((LegacyGeoPointFieldType) fieldType);
|
LegacyGeoPointFieldType geoFieldType = ((LegacyGeoPointFieldType) fieldType);
|
||||||
query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
|
query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType, context);
|
||||||
break;
|
break;
|
||||||
case MEMORY:
|
case MEMORY:
|
||||||
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
|
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
|
||||||
|
@ -306,7 +306,7 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
|
|||||||
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
|
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
|
||||||
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
|
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
|
||||||
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance,
|
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance,
|
||||||
geoFieldType, indexFieldData, bboxOptimization);
|
geoFieldType, indexFieldData, bboxOptimization, shardContext);
|
||||||
}
|
}
|
||||||
|
|
||||||
// if index created V_2_2 use (soon to be legacy) numeric encoding postings format
|
// if index created V_2_2 use (soon to be legacy) numeric encoding postings format
|
||||||
|
@ -356,7 +356,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
|
|||||||
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
|
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
|
||||||
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
|
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
|
||||||
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
|
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
|
||||||
indexFieldData, bboxOptimization);
|
indexFieldData, bboxOptimization, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
// if index created V_2_2 use (soon to be legacy) numeric encoding postings format
|
// if index created V_2_2 use (soon to be legacy) numeric encoding postings format
|
||||||
|
@ -36,6 +36,7 @@ import org.elasticsearch.script.ScriptService;
|
|||||||
import org.elasticsearch.script.ScriptSettings;
|
import org.elasticsearch.script.ScriptSettings;
|
||||||
|
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
|
import java.util.function.LongSupplier;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Context object used to rewrite {@link QueryBuilder} instances into simplified version.
|
* Context object used to rewrite {@link QueryBuilder} instances into simplified version.
|
||||||
@ -48,10 +49,11 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
|
|||||||
protected final Client client;
|
protected final Client client;
|
||||||
protected final IndexReader reader;
|
protected final IndexReader reader;
|
||||||
protected final ClusterState clusterState;
|
protected final ClusterState clusterState;
|
||||||
|
protected final LongSupplier nowInMillis;
|
||||||
|
|
||||||
public QueryRewriteContext(IndexSettings indexSettings, MapperService mapperService, ScriptService scriptService,
|
public QueryRewriteContext(IndexSettings indexSettings, MapperService mapperService, ScriptService scriptService,
|
||||||
IndicesQueriesRegistry indicesQueriesRegistry, Client client, IndexReader reader,
|
IndicesQueriesRegistry indicesQueriesRegistry, Client client, IndexReader reader,
|
||||||
ClusterState clusterState) {
|
ClusterState clusterState, LongSupplier nowInMillis) {
|
||||||
this.mapperService = mapperService;
|
this.mapperService = mapperService;
|
||||||
this.scriptService = scriptService;
|
this.scriptService = scriptService;
|
||||||
this.indexSettings = indexSettings;
|
this.indexSettings = indexSettings;
|
||||||
@ -59,6 +61,7 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
|
|||||||
this.client = client;
|
this.client = client;
|
||||||
this.reader = reader;
|
this.reader = reader;
|
||||||
this.clusterState = clusterState;
|
this.clusterState = clusterState;
|
||||||
|
this.nowInMillis = nowInMillis;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -117,6 +120,10 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier {
|
|||||||
return new QueryParseContext(defaultScriptLanguage, indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher());
|
return new QueryParseContext(defaultScriptLanguage, indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public long nowInMillis() {
|
||||||
|
return nowInMillis.getAsLong();
|
||||||
|
}
|
||||||
|
|
||||||
public BytesReference getTemplateBytes(Script template) {
|
public BytesReference getTemplateBytes(Script template) {
|
||||||
ExecutableScript executable = scriptService.executable(template,
|
ExecutableScript executable = scriptService.executable(template,
|
||||||
ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
||||||
|
@ -65,7 +65,6 @@ import org.elasticsearch.script.Script;
|
|||||||
import org.elasticsearch.script.ScriptContext;
|
import org.elasticsearch.script.ScriptContext;
|
||||||
import org.elasticsearch.script.ScriptService;
|
import org.elasticsearch.script.ScriptService;
|
||||||
import org.elasticsearch.script.SearchScript;
|
import org.elasticsearch.script.SearchScript;
|
||||||
import org.elasticsearch.search.internal.SearchContext;
|
|
||||||
import org.elasticsearch.search.lookup.SearchLookup;
|
import org.elasticsearch.search.lookup.SearchLookup;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -78,6 +77,7 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||||||
private final BitsetFilterCache bitsetFilterCache;
|
private final BitsetFilterCache bitsetFilterCache;
|
||||||
private final IndexFieldDataService indexFieldDataService;
|
private final IndexFieldDataService indexFieldDataService;
|
||||||
private final IndexSettings indexSettings;
|
private final IndexSettings indexSettings;
|
||||||
|
private final int shardId;
|
||||||
private String[] types = Strings.EMPTY_ARRAY;
|
private String[] types = Strings.EMPTY_ARRAY;
|
||||||
private boolean cachable = true;
|
private boolean cachable = true;
|
||||||
private final SetOnce<Boolean> frozen = new SetOnce<>();
|
private final SetOnce<Boolean> frozen = new SetOnce<>();
|
||||||
@ -97,13 +97,13 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||||||
private boolean mapUnmappedFieldAsString;
|
private boolean mapUnmappedFieldAsString;
|
||||||
private NestedScope nestedScope;
|
private NestedScope nestedScope;
|
||||||
private boolean isFilter;
|
private boolean isFilter;
|
||||||
private final LongSupplier nowInMillis;
|
|
||||||
|
|
||||||
public QueryShardContext(IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService,
|
public QueryShardContext(int shardId, IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache,
|
||||||
MapperService mapperService, SimilarityService similarityService, ScriptService scriptService,
|
IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService,
|
||||||
final IndicesQueriesRegistry indicesQueriesRegistry, Client client,
|
ScriptService scriptService, final IndicesQueriesRegistry indicesQueriesRegistry, Client client,
|
||||||
IndexReader reader, ClusterState clusterState, LongSupplier nowInMillis) {
|
IndexReader reader, ClusterState clusterState, LongSupplier nowInMillis) {
|
||||||
super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState);
|
super(indexSettings, mapperService, scriptService, indicesQueriesRegistry, client, reader, clusterState, nowInMillis);
|
||||||
|
this.shardId = shardId;
|
||||||
this.indexSettings = indexSettings;
|
this.indexSettings = indexSettings;
|
||||||
this.similarityService = similarityService;
|
this.similarityService = similarityService;
|
||||||
this.mapperService = mapperService;
|
this.mapperService = mapperService;
|
||||||
@ -112,11 +112,11 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||||||
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
|
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
|
||||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||||
this.nestedScope = new NestedScope();
|
this.nestedScope = new NestedScope();
|
||||||
this.nowInMillis = nowInMillis;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public QueryShardContext(QueryShardContext source) {
|
public QueryShardContext(QueryShardContext source) {
|
||||||
this(source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
|
this(source.shardId, source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService,
|
||||||
source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client,
|
source.similarityService, source.scriptService, source.indicesQueriesRegistry, source.client,
|
||||||
source.reader, source.clusterState, source.nowInMillis);
|
source.reader, source.clusterState, source.nowInMillis);
|
||||||
this.types = source.getTypes();
|
this.types = source.getTypes();
|
||||||
@ -264,21 +264,12 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||||||
private SearchLookup lookup = null;
|
private SearchLookup lookup = null;
|
||||||
|
|
||||||
public SearchLookup lookup() {
|
public SearchLookup lookup() {
|
||||||
SearchContext current = SearchContext.current();
|
|
||||||
if (current != null) {
|
|
||||||
return current.lookup();
|
|
||||||
}
|
|
||||||
if (lookup == null) {
|
if (lookup == null) {
|
||||||
lookup = new SearchLookup(getMapperService(), indexFieldDataService, null);
|
lookup = new SearchLookup(getMapperService(), indexFieldDataService, types);
|
||||||
}
|
}
|
||||||
return lookup;
|
return lookup;
|
||||||
}
|
}
|
||||||
|
|
||||||
public long nowInMillis() {
|
|
||||||
failIfFrozen();
|
|
||||||
return nowInMillis.getAsLong();
|
|
||||||
}
|
|
||||||
|
|
||||||
public NestedScope nestedScope() {
|
public NestedScope nestedScope() {
|
||||||
return nestedScope;
|
return nestedScope;
|
||||||
}
|
}
|
||||||
@ -411,4 +402,17 @@ public class QueryShardContext extends QueryRewriteContext {
|
|||||||
public boolean isCachable() {
|
public boolean isCachable() {
|
||||||
return cachable;
|
return cachable;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the shard ID this context was created for.
|
||||||
|
*/
|
||||||
|
public int getShardId() {
|
||||||
|
return shardId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nowInMillis() {
|
||||||
|
failIfFrozen();
|
||||||
|
return super.nowInMillis();
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -406,7 +406,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
|
|||||||
} else {
|
} else {
|
||||||
DateMathParser dateMathParser = format == null ? null : new DateMathParser(format);
|
DateMathParser dateMathParser = format == null ? null : new DateMathParser(format);
|
||||||
return fieldType.isFieldWithinQuery(queryRewriteContext.getIndexReader(), from, to, includeLower,
|
return fieldType.isFieldWithinQuery(queryRewriteContext.getIndexReader(), from, to, includeLower,
|
||||||
includeUpper, timeZone, dateMathParser);
|
includeUpper, timeZone, dateMathParser, queryRewriteContext);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -445,21 +445,21 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
|
|||||||
forcedDateParser = new DateMathParser(this.format);
|
forcedDateParser = new DateMathParser(this.format);
|
||||||
}
|
}
|
||||||
query = ((LegacyDateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
|
query = ((LegacyDateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
|
||||||
timeZone, forcedDateParser);
|
timeZone, forcedDateParser, context);
|
||||||
} else if (mapper instanceof DateFieldMapper.DateFieldType) {
|
} else if (mapper instanceof DateFieldMapper.DateFieldType) {
|
||||||
DateMathParser forcedDateParser = null;
|
DateMathParser forcedDateParser = null;
|
||||||
if (this.format != null) {
|
if (this.format != null) {
|
||||||
forcedDateParser = new DateMathParser(this.format);
|
forcedDateParser = new DateMathParser(this.format);
|
||||||
}
|
}
|
||||||
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
|
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
|
||||||
timeZone, forcedDateParser);
|
timeZone, forcedDateParser, context);
|
||||||
} else {
|
} else {
|
||||||
if (timeZone != null) {
|
if (timeZone != null) {
|
||||||
throw new QueryShardException(context, "[range] time_zone can not be applied to non date field ["
|
throw new QueryShardException(context, "[range] time_zone can not be applied to non date field ["
|
||||||
+ fieldName + "]");
|
+ fieldName + "]");
|
||||||
}
|
}
|
||||||
//LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well?
|
//LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well?
|
||||||
query = mapper.rangeQuery(from, to, includeLower, includeUpper);
|
query = mapper.rangeQuery(from, to, includeLower, includeUpper, context);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if (timeZone != null) {
|
if (timeZone != null) {
|
||||||
|
@ -48,6 +48,7 @@ import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
|
|||||||
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper;
|
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||||
|
import org.elasticsearch.index.query.QueryRewriteContext;
|
||||||
import org.elasticsearch.index.query.QueryShardContext;
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
import org.elasticsearch.search.MultiValueMode;
|
import org.elasticsearch.search.MultiValueMode;
|
||||||
|
|
||||||
@ -315,9 +316,10 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
|
|||||||
origin = context.nowInMillis();
|
origin = context.nowInMillis();
|
||||||
} else {
|
} else {
|
||||||
if (dateFieldType instanceof LegacyDateFieldMapper.DateFieldType) {
|
if (dateFieldType instanceof LegacyDateFieldMapper.DateFieldType) {
|
||||||
origin = ((LegacyDateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null);
|
origin = ((LegacyDateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null,
|
||||||
|
context);
|
||||||
} else {
|
} else {
|
||||||
origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null);
|
origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -131,19 +131,11 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
|
|||||||
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
|
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
|
||||||
return new RandomScoreFunction();
|
return new RandomScoreFunction();
|
||||||
}
|
}
|
||||||
final int salt = (context.index().getName().hashCode() << 10) | getCurrentShardId();
|
final int salt = (context.index().getName().hashCode() << 10) | context.getShardId();
|
||||||
final IndexFieldData<?> uidFieldData = context.getForField(fieldType);
|
final IndexFieldData<?> uidFieldData = context.getForField(fieldType);
|
||||||
return new RandomScoreFunction(this.seed == null ? hash(context.nowInMillis()) : seed, salt, uidFieldData);
|
return new RandomScoreFunction(this.seed == null ? hash(context.nowInMillis()) : seed, salt, uidFieldData);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the current shard's id for the seed. Protected because this method doesn't work during certain unit tests and needs to be
|
|
||||||
* replaced.
|
|
||||||
*/
|
|
||||||
int getCurrentShardId() {
|
|
||||||
return SearchContext.current().indexShard().shardId().id();
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int hash(long value) {
|
private static int hash(long value) {
|
||||||
return Long.hashCode(value);
|
return Long.hashCode(value);
|
||||||
}
|
}
|
||||||
|
@ -35,8 +35,8 @@ import org.elasticsearch.common.geo.GeoPoint;
|
|||||||
import org.elasticsearch.common.unit.DistanceUnit;
|
import org.elasticsearch.common.unit.DistanceUnit;
|
||||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||||
import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
@ -63,7 +63,7 @@ public class GeoDistanceRangeQuery extends Query {
|
|||||||
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower,
|
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower,
|
||||||
boolean includeUpper, GeoDistance geoDistance,
|
boolean includeUpper, GeoDistance geoDistance,
|
||||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType,
|
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType,
|
||||||
IndexGeoPointFieldData indexFieldData, String optimizeBbox) {
|
IndexGeoPointFieldData indexFieldData, String optimizeBbox, QueryShardContext context) {
|
||||||
this.lat = point.lat();
|
this.lat = point.lat();
|
||||||
this.lon = point.lon();
|
this.lon = point.lon();
|
||||||
this.geoDistance = geoDistance;
|
this.geoDistance = geoDistance;
|
||||||
@ -96,7 +96,7 @@ public class GeoDistanceRangeQuery extends Query {
|
|||||||
boundingBoxFilter = null;
|
boundingBoxFilter = null;
|
||||||
} else if ("indexed".equals(optimizeBbox)) {
|
} else if ("indexed".equals(optimizeBbox)) {
|
||||||
boundingBoxFilter = LegacyIndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(),
|
boundingBoxFilter = LegacyIndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(),
|
||||||
distanceBoundingCheck.bottomRight(), fieldType);
|
distanceBoundingCheck.bottomRight(), fieldType, context);
|
||||||
distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
|
distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
|
||||||
} else {
|
} else {
|
||||||
throw new IllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported");
|
throw new IllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported");
|
||||||
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.common.geo.GeoPoint;
|
import org.elasticsearch.common.geo.GeoPoint;
|
||||||
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
||||||
|
import org.elasticsearch.index.query.QueryShardContext;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
@ -34,34 +35,34 @@ import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
|
|||||||
public class LegacyIndexedGeoBoundingBoxQuery {
|
public class LegacyIndexedGeoBoundingBoxQuery {
|
||||||
|
|
||||||
public static Query create(GeoPoint topLeft, GeoPoint bottomRight,
|
public static Query create(GeoPoint topLeft, GeoPoint bottomRight,
|
||||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) {
|
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) {
|
||||||
if (!fieldType.isLatLonEnabled()) {
|
if (!fieldType.isLatLonEnabled()) {
|
||||||
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name()
|
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name()
|
||||||
+ "], can't use indexed filter on it");
|
+ "], can't use indexed filter on it");
|
||||||
}
|
}
|
||||||
//checks to see if bounding box crosses 180 degrees
|
//checks to see if bounding box crosses 180 degrees
|
||||||
if (topLeft.lon() > bottomRight.lon()) {
|
if (topLeft.lon() > bottomRight.lon()) {
|
||||||
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
|
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context);
|
||||||
} else {
|
} else {
|
||||||
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
|
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
|
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
|
||||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) {
|
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) {
|
||||||
BooleanQuery.Builder filter = new BooleanQuery.Builder();
|
BooleanQuery.Builder filter = new BooleanQuery.Builder();
|
||||||
filter.setMinimumNumberShouldMatch(1);
|
filter.setMinimumNumberShouldMatch(1);
|
||||||
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD);
|
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, context), Occur.SHOULD);
|
||||||
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true), Occur.SHOULD);
|
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true, context), Occur.SHOULD);
|
||||||
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
|
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST);
|
||||||
return new ConstantScoreQuery(filter.build());
|
return new ConstantScoreQuery(filter.build());
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
|
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
|
||||||
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType) {
|
LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) {
|
||||||
BooleanQuery.Builder filter = new BooleanQuery.Builder();
|
BooleanQuery.Builder filter = new BooleanQuery.Builder();
|
||||||
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
|
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, context), Occur.MUST);
|
||||||
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
|
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST);
|
||||||
return new ConstantScoreQuery(filter.build());
|
return new ConstantScoreQuery(filter.build());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -197,7 +197,8 @@ public class IndicesTTLService extends AbstractLifecycleComponent {
|
|||||||
|
|
||||||
private void purgeShards(List<IndexShard> shardsToPurge) {
|
private void purgeShards(List<IndexShard> shardsToPurge) {
|
||||||
for (IndexShard shardToPurge : shardsToPurge) {
|
for (IndexShard shardToPurge : shardsToPurge) {
|
||||||
Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true);
|
Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false,
|
||||||
|
true, null);
|
||||||
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
|
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
|
||||||
try {
|
try {
|
||||||
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());
|
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());
|
||||||
|
@ -136,7 +136,6 @@ final class DefaultSearchContext extends SearchContext {
|
|||||||
private SearchContextHighlight highlight;
|
private SearchContextHighlight highlight;
|
||||||
private SuggestionSearchContext suggest;
|
private SuggestionSearchContext suggest;
|
||||||
private List<RescoreSearchContext> rescore;
|
private List<RescoreSearchContext> rescore;
|
||||||
private SearchLookup searchLookup;
|
|
||||||
private volatile long keepAlive;
|
private volatile long keepAlive;
|
||||||
private final long originNanoTime = System.nanoTime();
|
private final long originNanoTime = System.nanoTime();
|
||||||
private volatile long lastAccessTime = -1;
|
private volatile long lastAccessTime = -1;
|
||||||
@ -168,17 +167,10 @@ final class DefaultSearchContext extends SearchContext {
|
|||||||
this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
|
this.searcher = new ContextIndexSearcher(engineSearcher, indexService.cache().query(), indexShard.getQueryCachingPolicy());
|
||||||
this.timeEstimateCounter = timeEstimateCounter;
|
this.timeEstimateCounter = timeEstimateCounter;
|
||||||
this.timeout = timeout;
|
this.timeout = timeout;
|
||||||
queryShardContext = indexService.newQueryShardContext(searcher.getIndexReader(), request::nowInMillis);
|
queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher.getIndexReader(), request::nowInMillis);
|
||||||
queryShardContext.setTypes(request.types());
|
queryShardContext.setTypes(request.types());
|
||||||
}
|
}
|
||||||
|
|
||||||
DefaultSearchContext(DefaultSearchContext source) {
|
|
||||||
this(source.id(), source.request(), source.shardTarget(), source.engineSearcher, source.indexService, source.indexShard(),
|
|
||||||
source.bigArrays(), source.timeEstimateCounter(), source.parseFieldMatcher(), source.timeout(), source.fetchPhase());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void doClose() {
|
public void doClose() {
|
||||||
// clear and scope phase we have
|
// clear and scope phase we have
|
||||||
@ -742,15 +734,6 @@ final class DefaultSearchContext extends SearchContext {
|
|||||||
this.keepAlive = keepAlive;
|
this.keepAlive = keepAlive;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
|
||||||
public SearchLookup lookup() {
|
|
||||||
// TODO: The types should take into account the parsing context in QueryParserContext...
|
|
||||||
if (searchLookup == null) {
|
|
||||||
searchLookup = new SearchLookup(mapperService(), fieldData(), request.types());
|
|
||||||
}
|
|
||||||
return searchLookup;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public DfsSearchResult dfsResult() {
|
public DfsSearchResult dfsResult() {
|
||||||
return dfsResult;
|
return dfsResult;
|
||||||
|
@ -494,7 +494,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||||||
if (context == null) {
|
if (context == null) {
|
||||||
throw new SearchContextMissingException(id);
|
throw new SearchContextMissingException(id);
|
||||||
}
|
}
|
||||||
SearchContext.setCurrent(context);
|
|
||||||
return context;
|
return context;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -519,15 +518,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||||||
final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws IOException {
|
final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws IOException {
|
||||||
final DefaultSearchContext context = createSearchContext(request, defaultSearchTimeout, searcher);
|
final DefaultSearchContext context = createSearchContext(request, defaultSearchTimeout, searcher);
|
||||||
try {
|
try {
|
||||||
// we clone the search context here just for rewriting otherwise we
|
// we clone the query shard context here just for rewriting otherwise we
|
||||||
// might end up with incorrect state since we are using now() or script services
|
// might end up with incorrect state since we are using now() or script services
|
||||||
// during rewrite and normalized / evaluate templates etc.
|
// during rewrite and normalized / evaluate templates etc.
|
||||||
// NOTE this context doesn't need to be closed - the outer context will
|
request.rewrite(new QueryShardContext(context.getQueryShardContext()));
|
||||||
// take care of this.
|
|
||||||
DefaultSearchContext rewriteContext = new DefaultSearchContext(context);
|
|
||||||
SearchContext.setCurrent(rewriteContext);
|
|
||||||
request.rewrite(rewriteContext.getQueryShardContext());
|
|
||||||
SearchContext.setCurrent(context);
|
|
||||||
assert context.getQueryShardContext().isCachable();
|
assert context.getQueryShardContext().isCachable();
|
||||||
if (request.scroll() != null) {
|
if (request.scroll() != null) {
|
||||||
context.scrollContext(new ScrollContext());
|
context.scrollContext(new ScrollContext());
|
||||||
@ -620,9 +614,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||||||
|
|
||||||
private void cleanContext(SearchContext context) {
|
private void cleanContext(SearchContext context) {
|
||||||
try {
|
try {
|
||||||
assert context == SearchContext.current();
|
|
||||||
context.clearReleasables(Lifetime.PHASE);
|
context.clearReleasables(Lifetime.PHASE);
|
||||||
SearchContext.removeCurrent();
|
|
||||||
} finally {
|
} finally {
|
||||||
context.decRef();
|
context.decRef();
|
||||||
}
|
}
|
||||||
|
@ -91,14 +91,14 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
|
|||||||
: searcher.count(filter);
|
: searcher.count(filter);
|
||||||
this.bucketCountThresholds = bucketCountThresholds;
|
this.bucketCountThresholds = bucketCountThresholds;
|
||||||
this.significanceHeuristic = significanceHeuristic;
|
this.significanceHeuristic = significanceHeuristic;
|
||||||
setFieldInfo();
|
setFieldInfo(context.searchContext());
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private void setFieldInfo() {
|
private void setFieldInfo(SearchContext context) {
|
||||||
if (!config.unmapped()) {
|
if (!config.unmapped()) {
|
||||||
this.indexedFieldName = config.fieldContext().field();
|
this.indexedFieldName = config.fieldContext().field();
|
||||||
fieldType = SearchContext.current().smartNameFieldType(indexedFieldName);
|
fieldType = context.smartNameFieldType(indexedFieldName);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -92,7 +92,7 @@ public final class MatchedQueriesFetchSubPhase implements FetchSubPhase {
|
|||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw ExceptionsHelper.convertToElastic(e);
|
throw ExceptionsHelper.convertToElastic(e);
|
||||||
} finally {
|
} finally {
|
||||||
SearchContext.current().clearReleasables(Lifetime.COLLECTION);
|
context.clearReleasables(Lifetime.COLLECTION);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -80,21 +80,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||||||
// For reference why we use RefCounted here see #20095
|
// For reference why we use RefCounted here see #20095
|
||||||
public abstract class SearchContext extends AbstractRefCounted implements Releasable {
|
public abstract class SearchContext extends AbstractRefCounted implements Releasable {
|
||||||
|
|
||||||
private static ThreadLocal<SearchContext> current = new ThreadLocal<>();
|
|
||||||
public static final int DEFAULT_TERMINATE_AFTER = 0;
|
public static final int DEFAULT_TERMINATE_AFTER = 0;
|
||||||
|
|
||||||
public static void setCurrent(SearchContext value) {
|
|
||||||
current.set(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
public static void removeCurrent() {
|
|
||||||
current.remove();
|
|
||||||
}
|
|
||||||
|
|
||||||
public static SearchContext current() {
|
|
||||||
return current.get();
|
|
||||||
}
|
|
||||||
|
|
||||||
private Map<Lifetime, List<Releasable>> clearables = null;
|
private Map<Lifetime, List<Releasable>> clearables = null;
|
||||||
private final AtomicBoolean closed = new AtomicBoolean(false);
|
private final AtomicBoolean closed = new AtomicBoolean(false);
|
||||||
private InnerHitsContext innerHitsContext;
|
private InnerHitsContext innerHitsContext;
|
||||||
@ -315,7 +301,9 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
|
|||||||
|
|
||||||
public abstract void keepAlive(long keepAlive);
|
public abstract void keepAlive(long keepAlive);
|
||||||
|
|
||||||
public abstract SearchLookup lookup();
|
public SearchLookup lookup() {
|
||||||
|
return getQueryShardContext().lookup();
|
||||||
|
}
|
||||||
|
|
||||||
public abstract DfsSearchResult dfsResult();
|
public abstract DfsSearchResult dfsResult();
|
||||||
|
|
||||||
|
@ -342,16 +342,6 @@ public class SubSearchContext extends FilteredSearchContext {
|
|||||||
return fetchSearchResult;
|
return fetchSearchResult;
|
||||||
}
|
}
|
||||||
|
|
||||||
private SearchLookup searchLookup;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public SearchLookup lookup() {
|
|
||||||
if (searchLookup == null) {
|
|
||||||
searchLookup = new SearchLookup(mapperService(), fieldData(), request().types());
|
|
||||||
}
|
|
||||||
return searchLookup;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public Counter timeEstimateCounter() {
|
public Counter timeEstimateCounter() {
|
||||||
throw new UnsupportedOperationException("Not supported");
|
throw new UnsupportedOperationException("Not supported");
|
||||||
|
@ -30,10 +30,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
|||||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.index.mapper.BinaryFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
|
||||||
import org.elasticsearch.index.mapper.FieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
|
||||||
import org.elasticsearch.plugins.Plugin;
|
import org.elasticsearch.plugins.Plugin;
|
||||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||||
@ -102,7 +98,7 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase {
|
|||||||
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
|
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
|
||||||
assertEquals(new BytesRef(value), indexedValue);
|
assertEquals(new BytesRef(value), indexedValue);
|
||||||
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
|
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
|
||||||
Object originalValue = fieldMapper.fieldType().valueForSearch(indexedValue);
|
Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue);
|
||||||
assertEquals(new BytesArray(value), originalValue);
|
assertEquals(new BytesArray(value), originalValue);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -21,8 +21,6 @@ package org.elasticsearch.index.mapper;
|
|||||||
import org.apache.lucene.index.IndexOptions;
|
import org.apache.lucene.index.IndexOptions;
|
||||||
import org.apache.lucene.index.Term;
|
import org.apache.lucene.index.Term;
|
||||||
import org.apache.lucene.search.TermQuery;
|
import org.apache.lucene.search.TermQuery;
|
||||||
import org.elasticsearch.index.mapper.BooleanFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
public class BooleanFieldTypeTests extends FieldTypeTestCase {
|
public class BooleanFieldTypeTests extends FieldTypeTestCase {
|
||||||
@ -44,11 +42,11 @@ public class BooleanFieldTypeTests extends FieldTypeTestCase {
|
|||||||
|
|
||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
assertEquals(true, ft.valueForSearch("T"));
|
assertEquals(true, ft.valueForDisplay("T"));
|
||||||
assertEquals(false, ft.valueForSearch("F"));
|
assertEquals(false, ft.valueForDisplay("F"));
|
||||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForSearch(0));
|
expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay(0));
|
||||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForSearch("true"));
|
expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay("true"));
|
||||||
expectThrows(IllegalArgumentException.class, () -> ft.valueForSearch("G"));
|
expectThrows(IllegalArgumentException.class, () -> ft.valueForDisplay("G"));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testTermQuery() {
|
public void testTermQuery() {
|
||||||
|
@ -28,15 +28,10 @@ import org.apache.lucene.index.IndexReader;
|
|||||||
import org.apache.lucene.index.IndexWriter;
|
import org.apache.lucene.index.IndexWriter;
|
||||||
import org.apache.lucene.index.IndexWriterConfig;
|
import org.apache.lucene.index.IndexWriterConfig;
|
||||||
import org.apache.lucene.index.MultiReader;
|
import org.apache.lucene.index.MultiReader;
|
||||||
import org.apache.lucene.index.Term;
|
|
||||||
import org.apache.lucene.search.TermQuery;
|
|
||||||
import org.apache.lucene.store.Directory;
|
import org.apache.lucene.store.Directory;
|
||||||
import org.apache.lucene.util.IOUtils;
|
import org.apache.lucene.util.IOUtils;
|
||||||
import org.elasticsearch.common.joda.DateMathParser;
|
import org.elasticsearch.common.joda.DateMathParser;
|
||||||
import org.elasticsearch.common.joda.Joda;
|
import org.elasticsearch.common.joda.Joda;
|
||||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||||
@ -71,31 +66,31 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||||||
DateFieldType ft = new DateFieldType();
|
DateFieldType ft = new DateFieldType();
|
||||||
ft.setName("my_date");
|
ft.setName("my_date");
|
||||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
true, true, null, null));
|
true, true, null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
false, false, null, null));
|
false, false, null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
false, true, null, null));
|
false, true, null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
true, false, null, null));
|
true, false, null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testIsFieldWithinQuery() throws IOException {
|
public void testIsFieldWithinQuery() throws IOException {
|
||||||
@ -121,7 +116,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||||||
// Fields with no value indexed.
|
// Fields with no value indexed.
|
||||||
DateFieldType ft2 = new DateFieldType();
|
DateFieldType ft2 = new DateFieldType();
|
||||||
ft2.setName("my_date2");
|
ft2.setName("my_date2");
|
||||||
assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null));
|
assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, null));
|
||||||
IOUtils.close(reader, w, dir);
|
IOUtils.close(reader, w, dir);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -146,7 +141,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
String date = "2015-10-12T12:09:55.000Z";
|
String date = "2015-10-12T12:09:55.000Z";
|
||||||
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||||
assertEquals(date, ft.valueForSearch(instant));
|
assertEquals(date, ft.valueForDisplay(instant));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testTermQuery() {
|
public void testTermQuery() {
|
||||||
@ -172,11 +167,11 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
|
|||||||
long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date2).getMillis();
|
long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date2).getMillis();
|
||||||
ft.setIndexOptions(IndexOptions.DOCS);
|
ft.setIndexOptions(IndexOptions.DOCS);
|
||||||
assertEquals(LongPoint.newRangeQuery("field", instant1, instant2),
|
assertEquals(LongPoint.newRangeQuery("field", instant1, instant2),
|
||||||
ft.rangeQuery(date1, date2, true, true).rewrite(new MultiReader()));
|
ft.rangeQuery(date1, date2, true, true, null).rewrite(new MultiReader()));
|
||||||
|
|
||||||
ft.setIndexOptions(IndexOptions.NONE);
|
ft.setIndexOptions(IndexOptions.NONE);
|
||||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
() -> ft.rangeQuery(date1, date2, true, true));
|
() -> ft.rangeQuery(date1, date2, true, true, null));
|
||||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -31,7 +31,7 @@ public class IdFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
ft.setName("_id");
|
ft.setName("_id");
|
||||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null));
|
||||||
assertEquals("Field [_id] of type [_id] does not support range queries", e.getMessage());
|
assertEquals("Field [_id] of type [_id] does not support range queries", e.getMessage());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -25,8 +25,6 @@ import org.apache.lucene.index.IndexOptions;
|
|||||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||||
import org.apache.lucene.util.BytesRef;
|
import org.apache.lucene.util.BytesRef;
|
||||||
import org.elasticsearch.common.network.InetAddresses;
|
import org.elasticsearch.common.network.InetAddresses;
|
||||||
import org.elasticsearch.index.mapper.IpFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
|
|
||||||
public class IpFieldTypeTests extends FieldTypeTestCase {
|
public class IpFieldTypeTests extends FieldTypeTestCase {
|
||||||
@Override
|
@Override
|
||||||
@ -49,11 +47,11 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
String ip = "2001:db8::2:1";
|
String ip = "2001:db8::2:1";
|
||||||
BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip)));
|
BytesRef asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip)));
|
||||||
assertEquals(ip, ft.valueForSearch(asBytes));
|
assertEquals(ip, ft.valueForDisplay(asBytes));
|
||||||
|
|
||||||
ip = "192.168.1.7";
|
ip = "192.168.1.7";
|
||||||
asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip)));
|
asBytes = new BytesRef(InetAddressPoint.encode(InetAddresses.forString(ip)));
|
||||||
assertEquals(ip, ft.valueForSearch(asBytes));
|
assertEquals(ip, ft.valueForDisplay(asBytes));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testTermQuery() {
|
public void testTermQuery() {
|
||||||
@ -88,83 +86,83 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
|
|||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("::"),
|
InetAddresses.forString("::"),
|
||||||
InetAddressPoint.MAX_VALUE),
|
InetAddressPoint.MAX_VALUE),
|
||||||
ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("::"),
|
InetAddresses.forString("::"),
|
||||||
InetAddresses.forString("192.168.2.0")),
|
InetAddresses.forString("192.168.2.0")),
|
||||||
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true));
|
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("::"),
|
InetAddresses.forString("::"),
|
||||||
InetAddresses.forString("192.168.1.255")),
|
InetAddresses.forString("192.168.1.255")),
|
||||||
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false));
|
ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("2001:db8::"),
|
InetAddresses.forString("2001:db8::"),
|
||||||
InetAddressPoint.MAX_VALUE),
|
InetAddressPoint.MAX_VALUE),
|
||||||
ft.rangeQuery("2001:db8::", null, true, randomBoolean()));
|
ft.rangeQuery("2001:db8::", null, true, randomBoolean(), null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("2001:db8::1"),
|
InetAddresses.forString("2001:db8::1"),
|
||||||
InetAddressPoint.MAX_VALUE),
|
InetAddressPoint.MAX_VALUE),
|
||||||
ft.rangeQuery("2001:db8::", null, false, randomBoolean()));
|
ft.rangeQuery("2001:db8::", null, false, randomBoolean(), null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("2001:db8::"),
|
InetAddresses.forString("2001:db8::"),
|
||||||
InetAddresses.forString("2001:db8::ffff")),
|
InetAddresses.forString("2001:db8::ffff")),
|
||||||
ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true));
|
ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("2001:db8::1"),
|
InetAddresses.forString("2001:db8::1"),
|
||||||
InetAddresses.forString("2001:db8::fffe")),
|
InetAddresses.forString("2001:db8::fffe")),
|
||||||
ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false));
|
ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("2001:db8::2"),
|
InetAddresses.forString("2001:db8::2"),
|
||||||
InetAddresses.forString("2001:db8::")),
|
InetAddresses.forString("2001:db8::")),
|
||||||
// same lo/hi values but inclusive=false so this won't match anything
|
// same lo/hi values but inclusive=false so this won't match anything
|
||||||
ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false));
|
ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false, null));
|
||||||
|
|
||||||
// Upper bound is the min IP and is not inclusive
|
// Upper bound is the min IP and is not inclusive
|
||||||
assertEquals(new MatchNoDocsQuery(),
|
assertEquals(new MatchNoDocsQuery(),
|
||||||
ft.rangeQuery("::", "::", true, false));
|
ft.rangeQuery("::", "::", true, false, null));
|
||||||
|
|
||||||
// Lower bound is the max IP and is not inclusive
|
// Lower bound is the max IP and is not inclusive
|
||||||
assertEquals(new MatchNoDocsQuery(),
|
assertEquals(new MatchNoDocsQuery(),
|
||||||
ft.rangeQuery("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true));
|
ft.rangeQuery("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("::"),
|
InetAddresses.forString("::"),
|
||||||
InetAddresses.forString("::fffe:ffff:ffff")),
|
InetAddresses.forString("::fffe:ffff:ffff")),
|
||||||
// same lo/hi values but inclusive=false so this won't match anything
|
// same lo/hi values but inclusive=false so this won't match anything
|
||||||
ft.rangeQuery("::", "0.0.0.0", true, false));
|
ft.rangeQuery("::", "0.0.0.0", true, false, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("::1:0:0:0"),
|
InetAddresses.forString("::1:0:0:0"),
|
||||||
InetAddressPoint.MAX_VALUE),
|
InetAddressPoint.MAX_VALUE),
|
||||||
// same lo/hi values but inclusive=false so this won't match anything
|
// same lo/hi values but inclusive=false so this won't match anything
|
||||||
ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true));
|
ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
// lower bound is ipv4, upper bound is ipv6
|
// lower bound is ipv4, upper bound is ipv6
|
||||||
InetAddressPoint.newRangeQuery("field",
|
InetAddressPoint.newRangeQuery("field",
|
||||||
InetAddresses.forString("192.168.1.7"),
|
InetAddresses.forString("192.168.1.7"),
|
||||||
InetAddresses.forString("2001:db8::")),
|
InetAddresses.forString("2001:db8::")),
|
||||||
ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true));
|
ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null));
|
||||||
|
|
||||||
ft.setIndexOptions(IndexOptions.NONE);
|
ft.setIndexOptions(IndexOptions.NONE);
|
||||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
() -> ft.rangeQuery("::1", "2001::", true, true));
|
() -> ft.rangeQuery("::1", "2001::", true, true, null));
|
||||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -47,7 +47,7 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase {
|
|||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null,
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null,
|
||||||
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
|
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
|
||||||
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
|
RandomStrings.randomAsciiOfLengthBetween(random(), 0, 5),
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testTermQuery() {
|
public void testTermQuery() {
|
||||||
|
@ -18,8 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.LegacyByteFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
public class LegacyByteFieldTypeTests extends FieldTypeTestCase {
|
public class LegacyByteFieldTypeTests extends FieldTypeTestCase {
|
||||||
@ -36,6 +34,6 @@ public class LegacyByteFieldTypeTests extends FieldTypeTestCase {
|
|||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
// bytes are stored as ints
|
// bytes are stored as ints
|
||||||
assertEquals(Byte.valueOf((byte) 3), ft.valueForSearch(Integer.valueOf(3)));
|
assertEquals(Byte.valueOf((byte) 3), ft.valueForDisplay(Integer.valueOf(3)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -249,13 +249,8 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||||||
.bytes());
|
.bytes());
|
||||||
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())));
|
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())));
|
||||||
|
|
||||||
LegacyNumericRangeQuery<Long> rangeQuery;
|
LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType()
|
||||||
try {
|
.rangeQuery("10:00:00", "11:00:00", true, true, null).rewrite(null);
|
||||||
SearchContext.setCurrent(new TestSearchContext(null));
|
|
||||||
rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true).rewrite(null);
|
|
||||||
} finally {
|
|
||||||
SearchContext.removeCurrent();
|
|
||||||
}
|
|
||||||
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(11).millis(), DateTimeZone.UTC).getMillis()));
|
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(11).millis(), DateTimeZone.UTC).getMillis()));
|
||||||
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()));
|
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()));
|
||||||
}
|
}
|
||||||
@ -275,13 +270,8 @@ public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase {
|
|||||||
.bytes());
|
.bytes());
|
||||||
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())));
|
assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())));
|
||||||
|
|
||||||
LegacyNumericRangeQuery<Long> rangeQuery;
|
LegacyNumericRangeQuery<Long> rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType()
|
||||||
try {
|
.rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null).rewrite(null);
|
||||||
SearchContext.setCurrent(new TestSearchContext(null));
|
|
||||||
rangeQuery = (LegacyNumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true).rewrite(null);
|
|
||||||
} finally {
|
|
||||||
SearchContext.removeCurrent();
|
|
||||||
}
|
|
||||||
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(35).millis(), DateTimeZone.UTC).getMillis()));
|
assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(35).millis(), DateTimeZone.UTC).getMillis()));
|
||||||
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()));
|
assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()));
|
||||||
}
|
}
|
||||||
|
@ -29,8 +29,6 @@ import org.apache.lucene.store.Directory;
|
|||||||
import org.apache.lucene.util.IOUtils;
|
import org.apache.lucene.util.IOUtils;
|
||||||
import org.elasticsearch.common.joda.DateMathParser;
|
import org.elasticsearch.common.joda.DateMathParser;
|
||||||
import org.elasticsearch.common.joda.Joda;
|
import org.elasticsearch.common.joda.Joda;
|
||||||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyDateFieldMapper.DateFieldType;
|
import org.elasticsearch.index.mapper.LegacyDateFieldMapper.DateFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||||
@ -75,31 +73,31 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||||||
DateFieldType ft = new DateFieldType();
|
DateFieldType ft = new DateFieldType();
|
||||||
ft.setName("my_date");
|
ft.setName("my_date");
|
||||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader,
|
||||||
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
DateTimeZone zone, DateMathParser alternateFormat) throws IOException {
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29",
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
true, true, null, null));
|
true, true, null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
false, false, null, null));
|
false, false, null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
false, true, null, null));
|
false, true, null, null, null));
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03",
|
||||||
true, false, null, null));
|
true, false, null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testIsFieldWithinQuery() throws IOException {
|
public void testIsFieldWithinQuery() throws IOException {
|
||||||
@ -145,6 +143,6 @@ public class LegacyDateFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
String date = "2015-10-12T12:09:55.000Z";
|
String date = "2015-10-12T12:09:55.000Z";
|
||||||
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||||
assertEquals(date, ft.valueForSearch(instant));
|
assertEquals(date, ft.valueForDisplay(instant));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -18,8 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType;
|
import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
@ -41,11 +39,11 @@ public class LegacyDoubleFieldTypeTests extends FieldTypeTestCase {
|
|||||||
DoubleFieldType ft = new DoubleFieldType();
|
DoubleFieldType ft = new DoubleFieldType();
|
||||||
// current impl ignores args and shourd always return INTERSECTS
|
// current impl ignores args and shourd always return INTERSECTS
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(),
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(),
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
assertEquals(Double.valueOf(1.2), ft.valueForSearch(1.2));
|
assertEquals(Double.valueOf(1.2), ft.valueForDisplay(1.2));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -18,8 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.LegacyFloatFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyFloatFieldMapper.FloatFieldType;
|
import org.elasticsearch.index.mapper.LegacyFloatFieldMapper.FloatFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
@ -41,11 +39,11 @@ public class LegacyFloatFieldTypeTests extends FieldTypeTestCase {
|
|||||||
FloatFieldType ft = new FloatFieldType();
|
FloatFieldType ft = new FloatFieldType();
|
||||||
// current impl ignores args and shourd always return INTERSECTS
|
// current impl ignores args and shourd always return INTERSECTS
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomFloat(), randomFloat(),
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomFloat(), randomFloat(),
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
assertEquals(Float.valueOf(1.2f), ft.valueForSearch(1.2f));
|
assertEquals(Float.valueOf(1.2f), ft.valueForDisplay(1.2f));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -18,8 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper.IntegerFieldType;
|
import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper.IntegerFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
@ -41,11 +39,11 @@ public class LegacyIntegerFieldTypeTests extends FieldTypeTestCase {
|
|||||||
IntegerFieldType ft = new IntegerFieldType();
|
IntegerFieldType ft = new IntegerFieldType();
|
||||||
// current impl ignores args and shourd always return INTERSECTS
|
// current impl ignores args and shourd always return INTERSECTS
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomInt(), randomInt(),
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomInt(), randomInt(),
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
assertEquals(Integer.valueOf(3), ft.valueForSearch(Integer.valueOf(3)));
|
assertEquals(Integer.valueOf(3), ft.valueForDisplay(Integer.valueOf(3)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -18,8 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.LegacyLongFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.LegacyLongFieldMapper.LongFieldType;
|
import org.elasticsearch.index.mapper.LegacyLongFieldMapper.LongFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
@ -41,11 +39,11 @@ public class LegacyLongFieldTypeTests extends FieldTypeTestCase {
|
|||||||
LongFieldType ft = new LongFieldType();
|
LongFieldType ft = new LongFieldType();
|
||||||
// current impl ignores args and shourd always return INTERSECTS
|
// current impl ignores args and shourd always return INTERSECTS
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomLong(), randomLong(),
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomLong(), randomLong(),
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
assertEquals(Long.valueOf(3), ft.valueForSearch(Long.valueOf(3)));
|
assertEquals(Long.valueOf(3), ft.valueForDisplay(Long.valueOf(3)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -18,8 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.LegacyShortFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
public class LegacyShortFieldTypeTests extends FieldTypeTestCase {
|
public class LegacyShortFieldTypeTests extends FieldTypeTestCase {
|
||||||
@ -36,6 +34,6 @@ public class LegacyShortFieldTypeTests extends FieldTypeTestCase {
|
|||||||
public void testValueForSearch() {
|
public void testValueForSearch() {
|
||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
// shorts are stored as ints
|
// shorts are stored as ints
|
||||||
assertEquals(Short.valueOf((short) 3), ft.valueForSearch(Integer.valueOf(3)));
|
assertEquals(Short.valueOf((short) 3), ft.valueForDisplay(Integer.valueOf(3)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -59,7 +59,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
// current impl ignores args and should always return INTERSECTS
|
// current impl ignores args and should always return INTERSECTS
|
||||||
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(),
|
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(),
|
||||||
randomBoolean(), randomBoolean(), null, null));
|
randomBoolean(), randomBoolean(), null, null, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testTermQuery() {
|
public void testTermQuery() {
|
||||||
@ -78,11 +78,11 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
|
MappedFieldType ft = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
|
||||||
ft.setName("field");
|
ft.setName("field");
|
||||||
ft.setIndexOptions(IndexOptions.DOCS);
|
ft.setIndexOptions(IndexOptions.DOCS);
|
||||||
assertEquals(LongPoint.newRangeQuery("field", 1, 3), ft.rangeQuery("1", "3", true, true));
|
assertEquals(LongPoint.newRangeQuery("field", 1, 3), ft.rangeQuery("1", "3", true, true, null));
|
||||||
|
|
||||||
ft.setIndexOptions(IndexOptions.NONE);
|
ft.setIndexOptions(IndexOptions.NONE);
|
||||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
() -> ft.rangeQuery("1", "3", true, true));
|
() -> ft.rangeQuery("1", "3", true, true, null));
|
||||||
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -38,9 +38,6 @@ import org.elasticsearch.index.IndexSettings;
|
|||||||
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
|
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
|
||||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.ScaledFloatFieldMapper;
|
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -122,7 +119,7 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
|
|||||||
boolean includeLower = randomBoolean();
|
boolean includeLower = randomBoolean();
|
||||||
boolean includeUpper = randomBoolean();
|
boolean includeUpper = randomBoolean();
|
||||||
Query doubleQ = NumberFieldMapper.NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper);
|
Query doubleQ = NumberFieldMapper.NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper);
|
||||||
Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper);
|
Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper, null);
|
||||||
assertEquals(searcher.count(doubleQ), searcher.count(scaledFloatQ));
|
assertEquals(searcher.count(doubleQ), searcher.count(scaledFloatQ));
|
||||||
}
|
}
|
||||||
IOUtils.close(reader, dir);
|
IOUtils.close(reader, dir);
|
||||||
@ -132,8 +129,8 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase {
|
|||||||
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
|
ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
|
||||||
ft.setName("scaled_float");
|
ft.setName("scaled_float");
|
||||||
ft.setScalingFactor(0.1 + randomDouble() * 100);
|
ft.setScalingFactor(0.1 + randomDouble() * 100);
|
||||||
assertNull(ft.valueForSearch(null));
|
assertNull(ft.valueForDisplay(null));
|
||||||
assertEquals(10/ft.getScalingFactor(), ft.valueForSearch(10L));
|
assertEquals(10/ft.getScalingFactor(), ft.valueForDisplay(10L));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testStats() throws IOException {
|
public void testStats() throws IOException {
|
||||||
|
@ -18,10 +18,6 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper;
|
package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
|
||||||
import org.elasticsearch.index.mapper.TimestampFieldMapper;
|
|
||||||
|
|
||||||
public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests {
|
public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests {
|
||||||
@Override
|
@Override
|
||||||
protected MappedFieldType createDefaultFieldType() {
|
protected MappedFieldType createDefaultFieldType() {
|
||||||
@ -33,6 +29,6 @@ public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
String date = "2015-10-12T12:09:55.000Z";
|
String date = "2015-10-12T12:09:55.000Z";
|
||||||
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis();
|
||||||
assertEquals(instant, ft.valueForSearch(instant));
|
assertEquals(instant, ft.valueForDisplay(instant));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -31,7 +31,7 @@ public class UidFieldTypeTests extends FieldTypeTestCase {
|
|||||||
MappedFieldType ft = createDefaultFieldType();
|
MappedFieldType ft = createDefaultFieldType();
|
||||||
ft.setName("_uid");
|
ft.setName("_uid");
|
||||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null));
|
||||||
assertEquals("Field [_uid] of type [_uid] does not support range queries", e.getMessage());
|
assertEquals("Field [_uid] of type [_uid] does not support range queries", e.getMessage());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -30,6 +30,7 @@ import org.elasticsearch.common.bytes.BytesReference;
|
|||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.common.xcontent.XContentType;
|
import org.elasticsearch.common.xcontent.XContentType;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.hamcrest.Matchers;
|
import org.hamcrest.Matchers;
|
||||||
|
|
||||||
@ -80,10 +81,11 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(BoolQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||||
if (!queryBuilder.hasClauses()) {
|
if (!queryBuilder.hasClauses()) {
|
||||||
assertThat(query, instanceOf(MatchAllDocsQuery.class));
|
assertThat(query, instanceOf(MatchAllDocsQuery.class));
|
||||||
} else {
|
} else {
|
||||||
|
QueryShardContext context = searchContext.getQueryShardContext();
|
||||||
List<BooleanClause> clauses = new ArrayList<>();
|
List<BooleanClause> clauses = new ArrayList<>();
|
||||||
clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context));
|
clauses.addAll(getBooleanClauses(queryBuilder.must(), BooleanClause.Occur.MUST, context));
|
||||||
clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context));
|
clauses.addAll(getBooleanClauses(queryBuilder.mustNot(), BooleanClause.Occur.MUST_NOT, context));
|
||||||
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
|
|||||||
import org.elasticsearch.common.ParseFieldMatcher;
|
import org.elasticsearch.common.ParseFieldMatcher;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -43,9 +44,9 @@ public class BoostingQueryBuilderTests extends AbstractQueryTestCase<BoostingQue
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
Query positive = queryBuilder.positiveQuery().toQuery(context);
|
Query positive = queryBuilder.positiveQuery().toQuery(context.getQueryShardContext());
|
||||||
Query negative = queryBuilder.negativeQuery().toQuery(context);
|
Query negative = queryBuilder.negativeQuery().toQuery(context.getQueryShardContext());
|
||||||
if (positive == null || negative == null) {
|
if (positive == null || negative == null) {
|
||||||
assertThat(query, nullValue());
|
assertThat(query, nullValue());
|
||||||
} else {
|
} else {
|
||||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||||||
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -99,7 +100,7 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(CommonTermsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(ExtendedCommonTermsQuery.class));
|
assertThat(query, instanceOf(ExtendedCommonTermsQuery.class));
|
||||||
ExtendedCommonTermsQuery extendedCommonTermsQuery = (ExtendedCommonTermsQuery) query;
|
ExtendedCommonTermsQuery extendedCommonTermsQuery = (ExtendedCommonTermsQuery) query;
|
||||||
assertThat(extendedCommonTermsQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.highFreqMinimumShouldMatch()));
|
assertThat(extendedCommonTermsQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo(queryBuilder.highFreqMinimumShouldMatch()));
|
||||||
|
@ -25,6 +25,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
|
|||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -45,8 +46,8 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(ConstantScoreQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(ConstantScoreQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
Query innerQuery = queryBuilder.innerQuery().toQuery(context);
|
Query innerQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext());
|
||||||
if (innerQuery == null) {
|
if (innerQuery == null) {
|
||||||
assertThat(query, nullValue());
|
assertThat(query, nullValue());
|
||||||
} else {
|
} else {
|
||||||
|
@ -26,6 +26,7 @@ import org.apache.lucene.search.PrefixQuery;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.common.ParseFieldMatcher;
|
import org.elasticsearch.common.ParseFieldMatcher;
|
||||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -57,8 +58,8 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
Collection<Query> queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context);
|
Collection<Query> queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context.getQueryShardContext());
|
||||||
assertThat(query, instanceOf(DisjunctionMaxQuery.class));
|
assertThat(query, instanceOf(DisjunctionMaxQuery.class));
|
||||||
DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query;
|
DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query;
|
||||||
assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker()));
|
assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker()));
|
||||||
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.cluster.metadata.MetaData;
|
import org.elasticsearch.cluster.metadata.MetaData;
|
||||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -55,9 +56,9 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase<ExistsQueryBu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
String fieldPattern = queryBuilder.fieldName();
|
String fieldPattern = queryBuilder.fieldName();
|
||||||
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
|
Collection<String> fields = context.getQueryShardContext().simpleMatchToIndexNames(fieldPattern);
|
||||||
if (getCurrentTypes().length == 0) {
|
if (getCurrentTypes().length == 0) {
|
||||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||||
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
|
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
|
||||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
|
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -43,16 +44,16 @@ public class FieldMaskingSpanQueryBuilderTests extends AbstractQueryTestCase<Fie
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
String fieldInQuery = queryBuilder.fieldName();
|
String fieldInQuery = queryBuilder.fieldName();
|
||||||
MappedFieldType fieldType = context.fieldMapper(fieldInQuery);
|
MappedFieldType fieldType = context.getQueryShardContext().fieldMapper(fieldInQuery);
|
||||||
if (fieldType != null) {
|
if (fieldType != null) {
|
||||||
fieldInQuery = fieldType.name();
|
fieldInQuery = fieldType.name();
|
||||||
}
|
}
|
||||||
assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
|
assertThat(query, instanceOf(FieldMaskingSpanQuery.class));
|
||||||
FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query;
|
FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query;
|
||||||
assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery));
|
assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery));
|
||||||
assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context)));
|
assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context.getQueryShardContext())));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testIllegalArguments() {
|
public void testIllegalArguments() {
|
||||||
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.FuzzyQuery;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.unit.Fuzziness;
|
import org.elasticsearch.common.unit.Fuzziness;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -72,7 +73,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(FuzzyQuery.class));
|
assertThat(query, instanceOf(FuzzyQuery.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.geo.GeoPoint;
|
|||||||
import org.elasticsearch.common.geo.GeoUtils;
|
import org.elasticsearch.common.geo.GeoUtils;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||||
import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery;
|
import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||||
import org.locationtech.spatial4j.io.GeohashUtils;
|
import org.locationtech.spatial4j.io.GeohashUtils;
|
||||||
@ -254,8 +255,9 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, QueryShardContext context)
|
protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, SearchContext searchContext)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
QueryShardContext context = searchContext.getQueryShardContext();
|
||||||
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
||||||
if (fieldType == null) {
|
if (fieldType == null) {
|
||||||
assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery);
|
assertTrue("Found no indexed geo query.", query instanceof MatchNoDocsQuery);
|
||||||
|
@ -19,7 +19,6 @@
|
|||||||
|
|
||||||
package org.elasticsearch.index.query;
|
package org.elasticsearch.index.query;
|
||||||
|
|
||||||
import org.apache.lucene.document.LatLonPoint;
|
|
||||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery;
|
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery;
|
||||||
@ -31,6 +30,7 @@ import org.elasticsearch.common.geo.GeoUtils;
|
|||||||
import org.elasticsearch.common.unit.DistanceUnit;
|
import org.elasticsearch.common.unit.DistanceUnit;
|
||||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||||
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||||
import org.locationtech.spatial4j.shape.Point;
|
import org.locationtech.spatial4j.shape.Point;
|
||||||
@ -130,8 +130,8 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
Version version = context.indexVersionCreated();
|
Version version = context.getQueryShardContext().indexVersionCreated();
|
||||||
if (version.before(Version.V_2_2_0)) {
|
if (version.before(Version.V_2_2_0)) {
|
||||||
assertLegacyQuery(queryBuilder, query);
|
assertLegacyQuery(queryBuilder, query);
|
||||||
} else {
|
} else {
|
||||||
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
|
|||||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||||
import org.elasticsearch.index.mapper.MapperService;
|
import org.elasticsearch.index.mapper.MapperService;
|
||||||
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.elasticsearch.test.geo.RandomGeoGenerator;
|
import org.elasticsearch.test.geo.RandomGeoGenerator;
|
||||||
|
|
||||||
@ -118,9 +119,9 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, QueryShardContext context)
|
protected void doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, SearchContext context)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
Version version = context.indexVersionCreated();
|
Version version = context.getQueryShardContext().indexVersionCreated();
|
||||||
if (version.before(Version.V_2_2_0)) {
|
if (version.before(Version.V_2_2_0)) {
|
||||||
assertLegacyQuery(queryBuilder, query);
|
assertLegacyQuery(queryBuilder, query);
|
||||||
} else {
|
} else {
|
||||||
|
@ -20,8 +20,6 @@
|
|||||||
package org.elasticsearch.index.query;
|
package org.elasticsearch.index.query;
|
||||||
|
|
||||||
import com.vividsolutions.jts.geom.Coordinate;
|
import com.vividsolutions.jts.geom.Coordinate;
|
||||||
import org.apache.lucene.document.LatLonPoint;
|
|
||||||
import org.apache.lucene.geo.Polygon;
|
|
||||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery;
|
import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery;
|
||||||
@ -34,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||||
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
|
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
|
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
|
||||||
@ -67,8 +66,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
Version version = context.indexVersionCreated();
|
Version version = context.getQueryShardContext().indexVersionCreated();
|
||||||
if (version.before(Version.V_2_2_0)) {
|
if (version.before(Version.V_2_2_0)) {
|
||||||
assertLegacyQuery(queryBuilder, query);
|
assertLegacyQuery(queryBuilder, query);
|
||||||
} else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
|
} else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
|
||||||
|
@ -37,6 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||||
import org.elasticsearch.index.get.GetResult;
|
import org.elasticsearch.index.get.GetResult;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
|
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
|
||||||
@ -133,7 +134,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
// Logic for doToQuery is complex and is hard to test here. Need to rely
|
// Logic for doToQuery is complex and is hard to test here. Need to rely
|
||||||
// on Integration tests to determine if created query is correct
|
// on Integration tests to determine if created query is correct
|
||||||
// TODO improve GeoShapeQueryBuilder.doToQuery() method to make it
|
// TODO improve GeoShapeQueryBuilder.doToQuery() method to make it
|
||||||
|
@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
|
|||||||
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
|
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
|
||||||
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
|
||||||
import org.elasticsearch.index.query.GeohashCellQuery.Builder;
|
import org.elasticsearch.index.query.GeohashCellQuery.Builder;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
import org.elasticsearch.test.geo.RandomShapeGenerator;
|
||||||
import org.locationtech.spatial4j.shape.Point;
|
import org.locationtech.spatial4j.shape.Point;
|
||||||
@ -64,7 +65,7 @@ public class GeohashCellQueryBuilderTests extends AbstractQueryTestCase<Builder>
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(Builder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(Builder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
if (queryBuilder.neighbors()) {
|
if (queryBuilder.neighbors()) {
|
||||||
assertThat(query, instanceOf(TermsQuery.class));
|
assertThat(query, instanceOf(TermsQuery.class));
|
||||||
} else {
|
} else {
|
||||||
|
@ -120,7 +120,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||||
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
||||||
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
||||||
assertEquals(queryBuilder.minChildren(), lpq.getMinChildren());
|
assertEquals(queryBuilder.minChildren(), lpq.getMinChildren());
|
||||||
@ -129,9 +129,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
|||||||
if (queryBuilder.innerHit() != null) {
|
if (queryBuilder.innerHit() != null) {
|
||||||
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
||||||
// doCreateTestQueryBuilder)
|
// doCreateTestQueryBuilder)
|
||||||
queryBuilder = (HasChildQueryBuilder) queryBuilder.rewrite(context);
|
queryBuilder = (HasChildQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
|
||||||
SearchContext searchContext = SearchContext.current();
|
|
||||||
assertNotNull(searchContext);
|
|
||||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||||
for (InnerHitBuilder builder : innerHitBuilders.values()) {
|
for (InnerHitBuilder builder : innerHitBuilders.values()) {
|
||||||
|
@ -100,7 +100,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||||
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
|
||||||
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
|
||||||
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
|
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
|
||||||
@ -108,9 +108,8 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
|
|||||||
if (queryBuilder.innerHit() != null) {
|
if (queryBuilder.innerHit() != null) {
|
||||||
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
||||||
// doCreateTestQueryBuilder)
|
// doCreateTestQueryBuilder)
|
||||||
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(context);
|
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
|
||||||
|
|
||||||
SearchContext searchContext = SearchContext.current();
|
|
||||||
assertNotNull(searchContext);
|
assertNotNull(searchContext);
|
||||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||||
|
@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
|
|||||||
import org.elasticsearch.common.ParseFieldMatcher;
|
import org.elasticsearch.common.ParseFieldMatcher;
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -80,7 +81,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
if (queryBuilder.ids().size() == 0) {
|
if (queryBuilder.ids().size() == 0) {
|
||||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||||
} else {
|
} else {
|
||||||
|
@ -20,6 +20,7 @@
|
|||||||
package org.elasticsearch.index.query;
|
package org.elasticsearch.index.query;
|
||||||
|
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -50,12 +51,12 @@ public class IndicesQueryBuilderTests extends AbstractQueryTestCase<IndicesQuery
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(IndicesQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(IndicesQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
Query expected;
|
Query expected;
|
||||||
if (queryBuilder.indices().length == 1 && getIndex().getName().equals(queryBuilder.indices()[0])) {
|
if (queryBuilder.indices().length == 1 && getIndex().getName().equals(queryBuilder.indices()[0])) {
|
||||||
expected = queryBuilder.innerQuery().toQuery(context);
|
expected = queryBuilder.innerQuery().toQuery(context.getQueryShardContext());
|
||||||
} else {
|
} else {
|
||||||
expected = queryBuilder.noMatchQuery().toQuery(context);
|
expected = queryBuilder.noMatchQuery().toQuery(context.getQueryShardContext());
|
||||||
}
|
}
|
||||||
assertEquals(expected, query);
|
assertEquals(expected, query);
|
||||||
}
|
}
|
||||||
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||||||
|
|
||||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -35,7 +36,7 @@ public class MatchAllQueryBuilderTests extends AbstractQueryTestCase<MatchAllQue
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(MatchAllDocsQuery.class));
|
assertThat(query, instanceOf(MatchAllDocsQuery.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||||||
|
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -35,7 +36,7 @@ public class MatchNoneQueryBuilderTests extends AbstractQueryTestCase<MatchNoneQ
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MatchNoneQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(MatchNoneQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -26,6 +26,7 @@ import org.apache.lucene.search.TermQuery;
|
|||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -88,7 +89,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, QueryShardContext context)
|
protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, SearchContext context)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
assertThat(query, notNullValue());
|
assertThat(query, notNullValue());
|
||||||
assertThat(query,
|
assertThat(query,
|
||||||
|
@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
|
|||||||
import org.apache.lucene.search.TermQuery;
|
import org.apache.lucene.search.TermQuery;
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -84,7 +85,7 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, notNullValue());
|
assertThat(query, notNullValue());
|
||||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
|
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
|
||||||
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
|
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
|
||||||
|
@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||||||
import org.elasticsearch.index.search.MatchQuery;
|
import org.elasticsearch.index.search.MatchQuery;
|
||||||
import org.elasticsearch.index.search.MatchQuery.Type;
|
import org.elasticsearch.index.search.MatchQuery.Type;
|
||||||
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
|
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.hamcrest.Matcher;
|
import org.hamcrest.Matcher;
|
||||||
|
|
||||||
@ -135,7 +136,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(MatchQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||||
assertThat(query, notNullValue());
|
assertThat(query, notNullValue());
|
||||||
|
|
||||||
if (query instanceof MatchAllDocsQuery) {
|
if (query instanceof MatchAllDocsQuery) {
|
||||||
@ -160,7 +161,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||||||
.or(instanceOf(PointRangeQuery.class)));
|
.or(instanceOf(PointRangeQuery.class)));
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
QueryShardContext context = searchContext.getQueryShardContext();
|
||||||
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
MappedFieldType fieldType = context.fieldMapper(queryBuilder.fieldName());
|
||||||
if (query instanceof TermQuery && fieldType != null) {
|
if (query instanceof TermQuery && fieldType != null) {
|
||||||
String queryValue = queryBuilder.value().toString();
|
String queryValue = queryBuilder.value().toString();
|
||||||
|
@ -40,6 +40,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||||||
import org.elasticsearch.common.xcontent.XContentParser;
|
import org.elasticsearch.common.xcontent.XContentParser;
|
||||||
import org.elasticsearch.index.VersionType;
|
import org.elasticsearch.index.VersionType;
|
||||||
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
|
import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
@ -243,7 +244,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
|
if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
|
||||||
assertThat(query, instanceOf(BooleanQuery.class));
|
assertThat(query, instanceOf(BooleanQuery.class));
|
||||||
} else {
|
} else {
|
||||||
|
@ -37,6 +37,7 @@ import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
|||||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||||
import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type;
|
import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type;
|
||||||
import org.elasticsearch.index.search.MatchQuery;
|
import org.elasticsearch.index.search.MatchQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -142,7 +143,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
// we rely on integration tests for deeper checks here
|
// we rely on integration tests for deeper checks here
|
||||||
assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
|
assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
|
||||||
.or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
|
.or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
|
||||||
|
@ -89,7 +89,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
|
||||||
QueryBuilder innerQueryBuilder = queryBuilder.query();
|
QueryBuilder innerQueryBuilder = queryBuilder.query();
|
||||||
assertThat(query, instanceOf(ToParentBlockJoinQuery.class));
|
assertThat(query, instanceOf(ToParentBlockJoinQuery.class));
|
||||||
ToParentBlockJoinQuery parentBlockJoinQuery = (ToParentBlockJoinQuery) query;
|
ToParentBlockJoinQuery parentBlockJoinQuery = (ToParentBlockJoinQuery) query;
|
||||||
@ -97,9 +97,8 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
|
|||||||
if (queryBuilder.innerHit() != null) {
|
if (queryBuilder.innerHit() != null) {
|
||||||
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
|
||||||
// doCreateTestQueryBuilder)
|
// doCreateTestQueryBuilder)
|
||||||
queryBuilder = (NestedQueryBuilder) queryBuilder.rewrite(context);
|
queryBuilder = (NestedQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
|
||||||
|
|
||||||
SearchContext searchContext = SearchContext.current();
|
|
||||||
assertNotNull(searchContext);
|
assertNotNull(searchContext);
|
||||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||||
|
@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
|||||||
import org.elasticsearch.common.compress.CompressedXContent;
|
import org.elasticsearch.common.compress.CompressedXContent;
|
||||||
import org.elasticsearch.index.mapper.MapperService;
|
import org.elasticsearch.index.mapper.MapperService;
|
||||||
import org.elasticsearch.index.mapper.TypeFieldMapper;
|
import org.elasticsearch.index.mapper.TypeFieldMapper;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.hamcrest.Matchers;
|
import org.hamcrest.Matchers;
|
||||||
|
|
||||||
@ -69,7 +70,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, Matchers.instanceOf(BooleanQuery.class));
|
assertThat(query, Matchers.instanceOf(BooleanQuery.class));
|
||||||
BooleanQuery booleanQuery = (BooleanQuery) query;
|
BooleanQuery booleanQuery = (BooleanQuery) query;
|
||||||
assertThat(booleanQuery.clauses().size(), Matchers.equalTo(2));
|
assertThat(booleanQuery.clauses().size(), Matchers.equalTo(2));
|
||||||
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.MultiTermQuery;
|
|||||||
import org.apache.lucene.search.PrefixQuery;
|
import org.apache.lucene.search.PrefixQuery;
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -65,7 +66,7 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(PrefixQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(PrefixQuery.class));
|
assertThat(query, instanceOf(PrefixQuery.class));
|
||||||
PrefixQuery prefixQuery = (PrefixQuery) query;
|
PrefixQuery prefixQuery = (PrefixQuery) query;
|
||||||
assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName()));
|
assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName()));
|
||||||
|
@ -42,11 +42,12 @@ public class QueryRewriteContextTests extends ESTestCase {
|
|||||||
.put("index.number_of_shards", 1)
|
.put("index.number_of_shards", 1)
|
||||||
.put("index.number_of_replicas", 1)
|
.put("index.number_of_replicas", 1)
|
||||||
);
|
);
|
||||||
|
final long nowInMills = randomPositiveLong();
|
||||||
IndicesQueriesRegistry indicesQueriesRegistry = new SearchModule(Settings.EMPTY, false, emptyList()).getQueryParserRegistry();
|
IndicesQueriesRegistry indicesQueriesRegistry = new SearchModule(Settings.EMPTY, false, emptyList()).getQueryParserRegistry();
|
||||||
IndexSettings indexSettings = new IndexSettings(indexMetadata.build(),
|
IndexSettings indexSettings = new IndexSettings(indexMetadata.build(),
|
||||||
Settings.builder().put(ScriptSettings.LEGACY_SCRIPT_SETTING, defaultLegacyScriptLanguage).build());
|
Settings.builder().put(ScriptSettings.LEGACY_SCRIPT_SETTING, defaultLegacyScriptLanguage).build());
|
||||||
QueryRewriteContext queryRewriteContext =
|
QueryRewriteContext queryRewriteContext =
|
||||||
new QueryRewriteContext(indexSettings, null, null, indicesQueriesRegistry, null, null, null);;
|
new QueryRewriteContext(indexSettings, null, null, indicesQueriesRegistry, null, null, null, () -> nowInMills);
|
||||||
|
|
||||||
// verify that the default script language in the query parse context is equal to defaultLegacyScriptLanguage variable:
|
// verify that the default script language in the query parse context is equal to defaultLegacyScriptLanguage variable:
|
||||||
QueryParseContext queryParseContext =
|
QueryParseContext queryParseContext =
|
||||||
|
@ -48,7 +48,7 @@ public class QueryShardContextTests extends ESTestCase {
|
|||||||
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
|
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
|
||||||
final long nowInMillis = randomPositiveLong();
|
final long nowInMillis = randomPositiveLong();
|
||||||
QueryShardContext context = new QueryShardContext(
|
QueryShardContext context = new QueryShardContext(
|
||||||
indexSettings, null, null, mapperService, null, null, null, null, null, null,
|
0, indexSettings, null, null, mapperService, null, null, null, null, null, null,
|
||||||
() -> nowInMillis);
|
() -> nowInMillis);
|
||||||
|
|
||||||
context.setAllowUnmappedFields(false);
|
context.setAllowUnmappedFields(false);
|
||||||
|
@ -39,6 +39,7 @@ import org.apache.lucene.search.TermQuery;
|
|||||||
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
|
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
|
||||||
import org.elasticsearch.common.lucene.all.AllTermQuery;
|
import org.elasticsearch.common.lucene.all.AllTermQuery;
|
||||||
import org.elasticsearch.common.unit.Fuzziness;
|
import org.elasticsearch.common.unit.Fuzziness;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.hamcrest.Matchers;
|
import org.hamcrest.Matchers;
|
||||||
import org.joda.time.DateTimeZone;
|
import org.joda.time.DateTimeZone;
|
||||||
@ -155,7 +156,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder,
|
protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder,
|
||||||
Query query, QueryShardContext context) throws IOException {
|
Query query, SearchContext context) throws IOException {
|
||||||
if ("".equals(queryBuilder.queryString())) {
|
if ("".equals(queryBuilder.queryString())) {
|
||||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||||
} else {
|
} else {
|
||||||
|
@ -31,6 +31,7 @@ import org.elasticsearch.common.ParsingException;
|
|||||||
import org.elasticsearch.common.lucene.BytesRefs;
|
import org.elasticsearch.common.lucene.BytesRefs;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
import org.joda.time.DateTime;
|
import org.joda.time.DateTime;
|
||||||
import org.joda.time.DateTimeZone;
|
import org.joda.time.DateTimeZone;
|
||||||
@ -116,7 +117,7 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
|
if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
|
||||||
assertThat(query, instanceOf(TermRangeQuery.class));
|
assertThat(query, instanceOf(TermRangeQuery.class));
|
||||||
TermRangeQuery termRangeQuery = (TermRangeQuery) query;
|
TermRangeQuery termRangeQuery = (TermRangeQuery) query;
|
||||||
@ -328,7 +329,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
|
|||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
"}";
|
"}";
|
||||||
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
|
QueryShardContext context = createShardContext();
|
||||||
|
Query parsedQuery = parseQuery(query).toQuery(context).rewrite(null);
|
||||||
if (parsedQuery instanceof PointRangeQuery) {
|
if (parsedQuery instanceof PointRangeQuery) {
|
||||||
// TODO what can we assert
|
// TODO what can we assert
|
||||||
} else {
|
} else {
|
||||||
@ -336,13 +338,13 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
|
|||||||
|
|
||||||
// Min value was 2012-01-01 (UTC) so we need to remove one hour
|
// Min value was 2012-01-01 (UTC) so we need to remove one hour
|
||||||
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
|
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
|
||||||
// Max value is when we started the test. So it should be some ms from now
|
// Max value is the nowInMillis set by the uery shard context
|
||||||
DateTime max = new DateTime(startDate, DateTimeZone.UTC);
|
long max = context.nowInMillis();
|
||||||
|
|
||||||
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
|
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
|
||||||
|
|
||||||
// We should not have a big difference here (should be some ms)
|
// We should not have a big difference here (should be some ms)
|
||||||
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));
|
assertThat(((LegacyNumericRangeQuery) parsedQuery).getMax().longValue() - max, lessThanOrEqualTo(60000L));
|
||||||
}
|
}
|
||||||
|
|
||||||
query = "{\n" +
|
query = "{\n" +
|
||||||
|
@ -36,8 +36,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||||||
public void testRewriteMissingField() throws Exception {
|
public void testRewriteMissingField() throws Exception {
|
||||||
IndexService indexService = createIndex("test");
|
IndexService indexService = createIndex("test");
|
||||||
IndexReader reader = new MultiReader();
|
IndexReader reader = new MultiReader();
|
||||||
QueryRewriteContext context = new QueryRewriteContext(indexService.getIndexSettings(),
|
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||||
indexService.mapperService(), null, null, null, reader, null);
|
null, null, null, null, reader, null, null);
|
||||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||||
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
||||||
}
|
}
|
||||||
@ -53,8 +53,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||||||
.endObject().endObject().string();
|
.endObject().endObject().string();
|
||||||
indexService.mapperService().merge("type",
|
indexService.mapperService().merge("type",
|
||||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||||
QueryRewriteContext context = new QueryRewriteContext(indexService.getIndexSettings(),
|
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||||
indexService.mapperService(), null, null, null, null, null);
|
null, null, null, null, null, null, null);
|
||||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||||
// can't make assumptions on a missing reader, so it must return INTERSECT
|
// can't make assumptions on a missing reader, so it must return INTERSECT
|
||||||
assertEquals(Relation.INTERSECTS, range.getRelation(context));
|
assertEquals(Relation.INTERSECTS, range.getRelation(context));
|
||||||
@ -72,8 +72,8 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
|
|||||||
indexService.mapperService().merge("type",
|
indexService.mapperService().merge("type",
|
||||||
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
|
||||||
IndexReader reader = new MultiReader();
|
IndexReader reader = new MultiReader();
|
||||||
QueryRewriteContext context = new QueryRewriteContext(indexService.getIndexSettings(),
|
QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(),
|
||||||
indexService.mapperService(), null, null, null, reader, null);
|
null, null, null, null, reader, null, null);
|
||||||
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
RangeQueryBuilder range = new RangeQueryBuilder("foo");
|
||||||
// no values -> DISJOINT
|
// no values -> DISJOINT
|
||||||
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
assertEquals(Relation.DISJOINT, range.getRelation(context));
|
||||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.search.RegexpQuery;
|
import org.apache.lucene.search.RegexpQuery;
|
||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -76,7 +77,7 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(RegexpQuery.class));
|
assertThat(query, instanceOf(RegexpQuery.class));
|
||||||
RegexpQuery regexpQuery = (RegexpQuery) query;
|
RegexpQuery regexpQuery = (RegexpQuery) query;
|
||||||
assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName()));
|
assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName()));
|
||||||
|
@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
|
|||||||
import org.elasticsearch.script.MockScriptEngine;
|
import org.elasticsearch.script.MockScriptEngine;
|
||||||
import org.elasticsearch.script.Script;
|
import org.elasticsearch.script.Script;
|
||||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -41,7 +42,7 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase<ScriptQueryBu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(ScriptQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(ScriptQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(ScriptQueryBuilder.ScriptQuery.class));
|
assertThat(query, instanceOf(ScriptQueryBuilder.ScriptQuery.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -28,6 +28,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
|
|||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.search.TermQuery;
|
import org.apache.lucene.search.TermQuery;
|
||||||
import org.elasticsearch.cluster.metadata.MetaData;
|
import org.elasticsearch.cluster.metadata.MetaData;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -241,7 +242,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
|
|||||||
* actual functionality of query parsing.
|
* actual functionality of query parsing.
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(SimpleQueryStringBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, notNullValue());
|
assertThat(query, notNullValue());
|
||||||
|
|
||||||
if ("".equals(queryBuilder.value())) {
|
if ("".equals(queryBuilder.value())) {
|
||||||
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||||||
|
|
||||||
import org.apache.lucene.search.Query;
|
import org.apache.lucene.search.Query;
|
||||||
import org.apache.lucene.search.spans.SpanContainingQuery;
|
import org.apache.lucene.search.spans.SpanContainingQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -35,7 +36,7 @@ public class SpanContainingQueryBuilderTests extends AbstractQueryTestCase<SpanC
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(SpanContainingQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(SpanContainingQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(SpanContainingQuery.class));
|
assertThat(query, instanceOf(SpanContainingQuery.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.spans.SpanFirstQuery;
|
|||||||
import org.elasticsearch.common.ParsingException;
|
import org.elasticsearch.common.ParsingException;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -39,7 +40,7 @@ public class SpanFirstQueryBuilderTests extends AbstractQueryTestCase<SpanFirstQ
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(SpanFirstQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(SpanFirstQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
assertThat(query, instanceOf(SpanFirstQuery.class));
|
assertThat(query, instanceOf(SpanFirstQuery.class));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
|
|||||||
import org.apache.lucene.search.spans.SpanBoostQuery;
|
import org.apache.lucene.search.spans.SpanBoostQuery;
|
||||||
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
|
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
|
||||||
import org.apache.lucene.search.spans.SpanQuery;
|
import org.apache.lucene.search.spans.SpanQuery;
|
||||||
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
@ -41,7 +42,7 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||||
if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
|
if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
|
||||||
assertThat(query, instanceOf(SpanBoostQuery.class));
|
assertThat(query, instanceOf(SpanBoostQuery.class));
|
||||||
SpanBoostQuery boostQuery = (SpanBoostQuery) query;
|
SpanBoostQuery boostQuery = (SpanBoostQuery) query;
|
||||||
@ -50,7 +51,7 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase<SpanMu
|
|||||||
}
|
}
|
||||||
assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class));
|
assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class));
|
||||||
SpanMultiTermQueryWrapper spanMultiTermQueryWrapper = (SpanMultiTermQueryWrapper) query;
|
SpanMultiTermQueryWrapper spanMultiTermQueryWrapper = (SpanMultiTermQueryWrapper) query;
|
||||||
Query multiTermQuery = queryBuilder.innerQuery().toQuery(context);
|
Query multiTermQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext());
|
||||||
if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
|
if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
|
||||||
assertThat(multiTermQuery, instanceOf(BoostQuery.class));
|
assertThat(multiTermQuery, instanceOf(BoostQuery.class));
|
||||||
BoostQuery boostQuery = (BoostQuery) multiTermQuery;
|
BoostQuery boostQuery = (BoostQuery) multiTermQuery;
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user