Lessen leniency of the query dsl. #18276
This change does the following:

- Queries that were previously unsupported, such as prefix queries on numeric fields or term queries on geo fields, now throw an error rather than returning a query that matches nothing.
- Fuzzy queries on numeric, date and ip fields are no longer supported: they used to be rewritten into range queries, and we now expect users to write range queries directly. Fuzzy, regexp and prefix queries are now only supported on text/keyword fields (including `_all`).
- The `_uid` and `_id` fields no longer support prefix or range queries, as that would prevent us from storing them more efficiently in the future, e.g. by using a binary encoding.

Note that it is still possible to ignore these errors by using the `lenient` option of the `match` or `query_string` queries (see the sketch below).
Parent: 83df20b83b, commit: 864ed04059
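As a rough illustration of the `lenient` escape hatch mentioned above, here is a minimal sketch using the Java client's query builders. It is not part of this commit, and the field name and value are invented for the example; printing a builder emits its JSON form, so nothing needs a running cluster.

```java
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class LenientMatchExample {
    public static void main(String[] args) {
        // With the stricter DSL, executing a match query whose value cannot be converted
        // to the field's type (here a numeric "age" field) is rejected at search time...
        QueryBuilder strict = QueryBuilders.matchQuery("age", "forty-two");
        // ...unless lenient is set, in which case the data-type mismatch is ignored and
        // the clause simply matches nothing, as it silently did before this change.
        QueryBuilder relaxed = QueryBuilders.matchQuery("age", "forty-two").lenient(true);
        System.out.println(strict + "\n" + relaxed);
    }
}
```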
@@ -151,66 +151,7 @@ public final class Fuzziness implements ToXContent, Writeable {

                return 1;
            }
        }
        return Math.min(2, asInt());
    }

    public TimeValue asTimeValue() {
        if (this.equals(AUTO)) {
            return TimeValue.timeValueMillis(1);
        } else {
            return TimeValue.parseTimeValue(fuzziness.toString(), null, "fuzziness");
        }
    }

    public long asLong() {
        if (this.equals(AUTO)) {
            return 1;
        }
        try {
            return Long.parseLong(fuzziness.toString());
        } catch (NumberFormatException ex) {
            return (long) Double.parseDouble(fuzziness.toString());
        }
    }

    public int asInt() {
        if (this.equals(AUTO)) {
            return 1;
        }
        try {
            return Integer.parseInt(fuzziness.toString());
        } catch (NumberFormatException ex) {
            return (int) Float.parseFloat(fuzziness.toString());
        }
    }

    public short asShort() {
        if (this.equals(AUTO)) {
            return 1;
        }
        try {
            return Short.parseShort(fuzziness.toString());
        } catch (NumberFormatException ex) {
            return (short) Float.parseFloat(fuzziness.toString());
        }
    }

    public byte asByte() {
        if (this.equals(AUTO)) {
            return 1;
        }
        try {
            return Byte.parseByte(fuzziness.toString());
        } catch (NumberFormatException ex) {
            return (byte) Float.parseFloat(fuzziness.toString());
        }
    }

    public double asDouble() {
        if (this.equals(AUTO)) {
            return 1d;
        }
        return Double.parseDouble(fuzziness.toString());
        return Math.min(2, (int) asFloat());
    }

    public float asFloat() {
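The numeric accessors (`asLong`, `asInt`, `asShort`, `asByte`, `asDouble`) and `asTimeValue` can go away because fuzziness is now only ever resolved to a Levenshtein edit distance, capped at 2. A minimal sketch of the surviving behaviour, with made-up inputs and under the assumption that `Fuzziness.build` accepts a plain number:

```java
import org.elasticsearch.common.unit.Fuzziness;

public class FuzzinessExample {
    public static void main(String[] args) {
        // An explicit fuzziness is capped at the maximum edit distance Lucene supports (2).
        int capped = Fuzziness.build(5).asDistance("elasticsearch");
        // AUTO picks 0, 1 or 2 edits depending on the length of the term; a very short
        // term gets no edits at all.
        int auto = Fuzziness.AUTO.asDistance("ab");
        System.out.println(capped + " " + auto);
    }
}
```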
@@ -25,21 +25,16 @@ import org.apache.lucene.index.IndexReader;

import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;

@@ -312,13 +307,6 @@ public abstract class MappedFieldType extends FieldType {

        return value;
    }

    /** Returns the indexed value used to construct search "values".
     * This method is used for the default implementations of most
     * query factory methods such as {@link #termQuery}. */
    protected BytesRef indexedValueForSearch(Object value) {
        return BytesRefs.toBytesRef(value);
    }

    /** Returns true if the field is searchable.
     *
     */

@@ -342,50 +330,33 @@ public abstract class MappedFieldType extends FieldType {

     * The default implementation returns a {@link TermQuery} over the value bytes,
     * boosted by {@link #boost()}.
     * @throws IllegalArgumentException if {@code value} cannot be converted to the expected data type */
    public Query termQuery(Object value, @Nullable QueryShardContext context) {
        failIfNotIndexed();
        TermQuery query = new TermQuery(new Term(name(), indexedValueForSearch(value)));
        if (boost == 1f ||
                (context != null && context.indexVersionCreated().before(Version.V_5_0_0_alpha1))) {
            return query;
        }
        return new BoostQuery(query, boost);
    }
    public abstract Query termQuery(Object value, @Nullable QueryShardContext context);

    public Query termsQuery(List values, @Nullable QueryShardContext context) {
        failIfNotIndexed();
        BytesRef[] bytesRefs = new BytesRef[values.size()];
        for (int i = 0; i < bytesRefs.length; i++) {
            bytesRefs[i] = indexedValueForSearch(values.get(i));
    /** Build a constant-scoring query that matches all values. The default implementation uses a
     * {@link ConstantScoreQuery} around a {@link BooleanQuery} whose {@link Occur#SHOULD} clauses
     * are generated with {@link #termQuery}. */
    public Query termsQuery(List<?> values, @Nullable QueryShardContext context) {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        for (Object value : values) {
            builder.add(termQuery(value, context), Occur.SHOULD);
        }
        return new TermsQuery(name(), bytesRefs);
        return new ConstantScoreQuery(builder.build());
    }

    public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
        failIfNotIndexed();
        return new TermRangeQuery(name(),
            lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
            upperTerm == null ? null : indexedValueForSearch(upperTerm),
            includeLower, includeUpper);
        throw new IllegalArgumentException("Field [" + name + "] of type [" + typeName() + "] does not support range queries");
    }

    public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
        failIfNotIndexed();
        return new FuzzyQuery(new Term(name(), indexedValueForSearch(value)),
            fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions);
        throw new IllegalArgumentException("Can only use fuzzy queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
    }

    public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
        failIfNotIndexed();
        PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value)));
        if (method != null) {
            query.setRewriteMethod(method);
        }
        return query;
    public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) {
        throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
    }

    public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
        throw new QueryShardException(context, "Can only use regular expression on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
    public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) {
        throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]");
    }

    public Query nullValueQuery() {
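From a user's perspective the new `MappedFieldType` defaults surface as clear errors instead of silent no-match queries. A hypothetical before/after sketch using the Java client builders (field names are invented for illustration; assume `port` is mapped as a numeric field):

```java
import org.elasticsearch.index.query.QueryBuilders;

public class StrictDslExamples {
    public static void main(String[] args) {
        // These used to silently match nothing on a numeric field; executing them now
        // fails with a "Can only use ... on keyword and text fields" style error.
        System.out.println(QueryBuilders.prefixQuery("port", "80"));
        System.out.println(QueryBuilders.regexpQuery("port", "8[0-9]+"));

        // Supported alternative: say explicitly what you want to match.
        System.out.println(QueryBuilders.rangeQuery("port").gte(8000).lte(8099));

        // _uid/_id lose prefix and range support, but exact lookups keep working.
        System.out.println(QueryBuilders.idsQuery().addIds("doc-1", "doc-2"));
    }
}
```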
New file: StringFieldType.java

@@ -0,0 +1,95 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.query.QueryShardContext;

/** Base class for {@link MappedFieldType} implementations that use the same
 * representation for internal index terms as the external representation so
 * that partial matching queries such as prefix, wildcard and fuzzy queries
 * can be implemented. */
public abstract class StringFieldType extends TermBasedFieldType {

    public StringFieldType() {}

    protected StringFieldType(MappedFieldType ref) {
        super(ref);
    }

    public Query termsQuery(List<?> values, QueryShardContext context) {
        failIfNotIndexed();
        BytesRef[] bytesRefs = new BytesRef[values.size()];
        for (int i = 0; i < bytesRefs.length; i++) {
            bytesRefs[i] = indexedValueForSearch(values.get(i));
        }
        return new TermsQuery(name(), bytesRefs);
    }

    @Override
    public final Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions,
            boolean transpositions) {
        failIfNotIndexed();
        return new FuzzyQuery(new Term(name(), indexedValueForSearch(value)),
            fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions);
    }

    @Override
    public final Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) {
        failIfNotIndexed();
        PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value)));
        if (method != null) {
            query.setRewriteMethod(method);
        }
        return query;
    }

    @Override
    public final Query regexpQuery(String value, int flags, int maxDeterminizedStates,
            MultiTermQuery.RewriteMethod method, QueryShardContext context) {
        failIfNotIndexed();
        RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);
        if (method != null) {
            query.setRewriteMethod(method);
        }
        return query;
    }

    @Override
    public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
        failIfNotIndexed();
        return new TermRangeQuery(name(),
            lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
            upperTerm == null ? null : indexedValueForSearch(upperTerm),
            includeLower, includeUpper);
    }
}
New file: TermBasedFieldType.java

@@ -0,0 +1,72 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.query.QueryShardContext;

/** Base {@link MappedFieldType} implementation for a field that is indexed
 * with the inverted index. */
public abstract class TermBasedFieldType extends MappedFieldType {

    public TermBasedFieldType() {}

    protected TermBasedFieldType(MappedFieldType ref) {
        super(ref);
    }

    /** Returns the indexed value used to construct search "values".
     * This method is used for the default implementations of most
     * query factory methods such as {@link #termQuery}. */
    protected BytesRef indexedValueForSearch(Object value) {
        return BytesRefs.toBytesRef(value);
    }

    @Override
    public Query termQuery(Object value, QueryShardContext context) {
        failIfNotIndexed();
        TermQuery query = new TermQuery(new Term(name(), indexedValueForSearch(value)));
        if (boost() == 1f ||
                (context != null && context.indexVersionCreated().before(Version.V_5_0_0_alpha1))) {
            return query;
        }
        return new BoostQuery(query, boost());
    }

    @Override
    public Query termsQuery(List<?> values, QueryShardContext context) {
        failIfNotIndexed();
        BytesRef[] bytesRefs = new BytesRef[values.size()];
        for (int i = 0; i < bytesRefs.length; i++) {
            bytesRefs[i] = indexedValueForSearch(values.get(i));
        }
        return new TermsQuery(name(), bytesRefs);
    }

}
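Taken together, the two new base classes split the work: `TermBasedFieldType` supplies exact `term`/`terms` queries for anything indexed as terms, and `StringFieldType` layers the partial-matching queries (prefix, fuzzy, regexp, range) on top. A hypothetical custom field type, shown only as an illustration of the new class hierarchy and not part of this commit, would now reduce to boilerplate:

```java
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.StringFieldType;

// Hypothetical field type: by extending StringFieldType it inherits term, terms,
// range, prefix, fuzzy and regexp query support; only clone() and typeName() remain.
public class TagFieldType extends StringFieldType {

    public TagFieldType() {}

    protected TagFieldType(TagFieldType ref) {
        super(ref);
    }

    @Override
    public MappedFieldType clone() {
        return new TagFieldType(this);
    }

    @Override
    public String typeName() {
        return "tag";
    }
}
```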
@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.core;
|
|||
import com.carrotsearch.hppc.ObjectArrayList;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.store.ByteArrayDataOutput;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
|
@ -40,6 +41,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -135,6 +138,11 @@ public class BinaryFieldMapper extends FieldMapper {
|
|||
failIfNoDocValues();
|
||||
return new BytesBinaryDVIndexFieldData.Builder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new QueryShardException(context, "Binary fields do not support searching");
|
||||
}
|
||||
}
|
||||
|
||||
protected BinaryFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
|
|
|
@ -22,10 +22,11 @@ package org.elasticsearch.index.mapper.core;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.SortedNumericDocValuesField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermRangeQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Booleans;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -38,6 +39,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -48,7 +50,6 @@ import java.util.Map;
|
|||
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
|
||||
|
||||
/**
|
||||
* A field mapper for boolean fields.
|
||||
|
@ -119,7 +120,7 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
public static final class BooleanFieldType extends MappedFieldType {
|
||||
public static final class BooleanFieldType extends TermBasedFieldType {
|
||||
|
||||
public BooleanFieldType() {}
|
||||
|
||||
|
@ -200,6 +201,15 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
return DocValueFormat.BOOLEAN;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
failIfNotIndexed();
|
||||
return new TermRangeQuery(name(),
|
||||
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
|
||||
upperTerm == null ? null : indexedValueForSearch(upperTerm),
|
||||
includeLower, includeUpper);
|
||||
}
|
||||
}
|
||||
|
||||
protected BooleanFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
|
|
|
@ -44,6 +44,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
|
||||
|
@ -178,7 +179,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
|
|||
}
|
||||
}
|
||||
|
||||
public static final class CompletionFieldType extends MappedFieldType {
|
||||
public static final class CompletionFieldType extends TermBasedFieldType {
|
||||
|
||||
private static PostingsFormat postingsFormat;
|
||||
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperException;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.search.suggest.completion2x.AnalyzingCompletionLookupProvider;
|
||||
import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat;
|
||||
import org.elasticsearch.search.suggest.completion2x.CompletionTokenStream;
|
||||
|
@ -231,7 +232,7 @@ public class CompletionFieldMapper2x extends FieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
public static final class CompletionFieldType extends MappedFieldType {
|
||||
public static final class CompletionFieldType extends TermBasedFieldType {
|
||||
private PostingsFormat postingsFormat;
|
||||
private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
|
||||
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;
|
||||
|
|
|
@@ -316,21 +316,6 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu

            return query;
        }

        @Override
        public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            failIfNotIndexed();
            long baseLo = parseToMilliseconds(value, false, null, dateMathParser);
            long baseHi = parseToMilliseconds(value, true, null, dateMathParser);
            long delta;
            try {
                delta = fuzziness.asTimeValue().millis();
            } catch (Exception e) {
                // not a time format
                delta = fuzziness.asLong();
            }
            return LongPoint.newRangeQuery(name(), baseLo - delta, baseHi + delta);
        }

        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
            failIfNotIndexed();
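With the fuzzy-on-date shortcut removed, the equivalent request is an explicit range around the target date, for instance via date math. A sketch with a made-up field name and dates (the `||` date-math syntax is assumed here, not introduced by this change):

```java
import org.elasticsearch.index.query.QueryBuilders;

public class DateRangeExample {
    public static void main(String[] args) {
        // Instead of a fuzzy query with a time-valued fuzziness such as "2d" on a date
        // field, spell out the window you actually want to match.
        System.out.println(
            QueryBuilders.rangeQuery("timestamp")
                .gte("2016-05-10||-2d/d")
                .lte("2016-05-10||+2d/d"));
    }
}
```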
@ -22,13 +22,8 @@ package org.elasticsearch.index.mapper.core;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.SortedSetDocValuesField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -40,8 +35,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.StringFieldType;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -143,7 +138,7 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
|
|||
}
|
||||
}
|
||||
|
||||
public static final class KeywordFieldType extends MappedFieldType {
|
||||
public static final class KeywordFieldType extends StringFieldType {
|
||||
|
||||
public KeywordFieldType() {}
|
||||
|
||||
|
@ -173,17 +168,6 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
|
|||
failIfNoDocValues();
|
||||
return new DocValuesIndexFieldData.Builder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
|
||||
@Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);
|
||||
if (method != null) {
|
||||
query.setRewriteMethod(method);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
}
|
||||
|
||||
private Boolean includeInAll;
|
||||
|
|
|
@ -33,7 +33,6 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.fieldstats.FieldStats;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -159,16 +158,6 @@ public class LegacyByteFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
byte iValue = parseValue(value);
|
||||
byte iSim = fuzziness.asByte();
|
||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Long stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -358,22 +358,6 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
|
|||
return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
long iValue = parseValue(value);
|
||||
long iSim;
|
||||
try {
|
||||
iSim = fuzziness.asTimeValue().millis();
|
||||
} catch (Exception e) {
|
||||
// not a time format
|
||||
iSim = fuzziness.asLong();
|
||||
}
|
||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Date stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -36,7 +36,6 @@ import org.elasticsearch.action.fieldstats.FieldStats;
|
|||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Numbers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -170,16 +169,6 @@ public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
double iValue = parseDoubleValue(value);
|
||||
double iSim = fuzziness.asDouble();
|
||||
return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Double stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.fieldstats.FieldStats;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -155,16 +154,6 @@ public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
float iValue = parseValue(value);
|
||||
final float iSim = fuzziness.asFloat();
|
||||
return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Double stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -34,7 +34,6 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.fieldstats.FieldStats;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -135,8 +134,7 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
|
|||
|
||||
@Override
|
||||
public String typeName() {
|
||||
// TODO: this should be the same as the mapper type name, except fielddata expects int...
|
||||
return "int";
|
||||
return "integer";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -159,16 +157,6 @@ public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
int iValue = parseValue(value);
|
||||
int iSim = fuzziness.asInt();
|
||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Long stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -34,7 +34,6 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.fieldstats.FieldStats;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -158,16 +157,6 @@ public class LegacyLongFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
long iValue = parseLongValue(value);
|
||||
final long iSim = fuzziness.asLong();
|
||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -27,20 +27,19 @@ import org.apache.lucene.analysis.LegacyNumericTokenStream;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.SortedNumericDocValuesField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
@ -121,7 +120,7 @@ public abstract class LegacyNumberFieldMapper extends FieldMapper implements All
|
|||
protected abstract int maxPrecisionStep();
|
||||
}
|
||||
|
||||
public static abstract class NumberFieldType extends MappedFieldType {
|
||||
public static abstract class NumberFieldType extends TermBasedFieldType {
|
||||
|
||||
public NumberFieldType(LegacyNumericType numericType) {
|
||||
setTokenized(false);
|
||||
|
@ -146,9 +145,6 @@ public abstract class LegacyNumberFieldMapper extends FieldMapper implements All
|
|||
|
||||
public abstract NumberFieldType clone();
|
||||
|
||||
@Override
|
||||
public abstract Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);
|
||||
|
||||
@Override
|
||||
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
|
||||
if (timeZone != null) {
|
||||
|
|
|
@ -163,16 +163,6 @@ public class LegacyShortFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
short iValue = parseValue(value);
|
||||
short iSim = fuzziness.asShort();
|
||||
return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Long stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -40,7 +40,6 @@ import org.elasticsearch.common.Explicit;
|
|||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
|
@ -233,13 +232,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return FloatPoint.newRangeQuery(field, l, u);
|
||||
}
|
||||
|
||||
@Override
|
||||
Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) {
|
||||
float base = parse(value);
|
||||
float delta = fuzziness.asFloat();
|
||||
return rangeQuery(field, base - delta, base + delta, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Field> createFields(String name, Number value,
|
||||
boolean indexed, boolean docValued, boolean stored) {
|
||||
|
@ -324,13 +316,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return DoublePoint.newRangeQuery(field, l, u);
|
||||
}
|
||||
|
||||
@Override
|
||||
Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) {
|
||||
double base = parse(value);
|
||||
double delta = fuzziness.asFloat();
|
||||
return rangeQuery(field, base - delta, base + delta, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Field> createFields(String name, Number value,
|
||||
boolean indexed, boolean docValued, boolean stored) {
|
||||
|
@ -407,11 +392,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) {
|
||||
return INTEGER.fuzzyQuery(field, value, fuzziness);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Field> createFields(String name, Number value,
|
||||
boolean indexed, boolean docValued, boolean stored) {
|
||||
|
@ -473,11 +453,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) {
|
||||
return INTEGER.fuzzyQuery(field, value, fuzziness);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Field> createFields(String name, Number value,
|
||||
boolean indexed, boolean docValued, boolean stored) {
|
||||
|
@ -560,13 +535,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return IntPoint.newRangeQuery(field, l, u);
|
||||
}
|
||||
|
||||
@Override
|
||||
Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) {
|
||||
int base = parse(value);
|
||||
int delta = fuzziness.asInt();
|
||||
return rangeQuery(field, base - delta, base + delta, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Field> createFields(String name, Number value,
|
||||
boolean indexed, boolean docValued, boolean stored) {
|
||||
|
@ -663,13 +631,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return LongPoint.newRangeQuery(field, l, u);
|
||||
}
|
||||
|
||||
@Override
|
||||
Query fuzzyQuery(String field, Object value, Fuzziness fuzziness) {
|
||||
long base = parse(value);
|
||||
long delta = fuzziness.asLong();
|
||||
return rangeQuery(field, base - delta, base + delta, true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Field> createFields(String name, Number value,
|
||||
boolean indexed, boolean docValued, boolean stored) {
|
||||
|
@ -722,7 +683,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
abstract Query termsQuery(String field, List<Object> values);
|
||||
abstract Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
|
||||
boolean includeLower, boolean includeUpper);
|
||||
abstract Query fuzzyQuery(String field, Object value, Fuzziness fuzziness);
|
||||
abstract Number parse(XContentParser parser, boolean coerce) throws IOException;
|
||||
abstract Number parse(Object value);
|
||||
public abstract List<Field> createFields(String name, Number value, boolean indexed,
|
||||
|
@ -791,13 +751,6 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return query;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength,
|
||||
int maxExpansions, boolean transpositions) {
|
||||
failIfNotIndexed();
|
||||
return type.fuzzyQuery(name(), value, fuzziness);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats stats(IndexReader reader) throws IOException {
|
||||
return type.stats(reader, name(), isSearchable(), isAggregatable());
|
||||
|
|
|
@ -355,7 +355,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
}
|
||||
}
|
||||
|
||||
public static final class StringFieldType extends MappedFieldType {
|
||||
public static final class StringFieldType extends org.elasticsearch.index.mapper.StringFieldType {
|
||||
|
||||
private boolean fielddata;
|
||||
private double fielddataMinFrequency;
|
||||
|
@ -485,15 +485,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
+ "use significant memory.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
|
||||
RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);
|
||||
if (method != null) {
|
||||
query.setRewriteMethod(method);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
}
|
||||
|
||||
private Boolean includeInAll;
|
||||
|
|
|
@ -21,12 +21,7 @@ package org.elasticsearch.index.mapper.core;
|
|||
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
|
@ -39,8 +34,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.StringFieldType;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -172,7 +167,7 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
|
|||
}
|
||||
}
|
||||
|
||||
public static final class TextFieldType extends MappedFieldType {
|
||||
public static final class TextFieldType extends StringFieldType {
|
||||
|
||||
private boolean fielddata;
|
||||
private double fielddataMinFrequency;
|
||||
|
@ -300,17 +295,6 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
|
|||
}
|
||||
return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
|
||||
@Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
|
||||
failIfNotIndexed();
|
||||
RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);
|
||||
if (method != null) {
|
||||
query.setRewriteMethod(method);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
}
|
||||
|
||||
private Boolean includeInAll;
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.mapper.geo;
|
||||
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.apache.lucene.util.LegacyNumericUtils;
|
||||
import org.elasticsearch.Version;
|
||||
|
@ -47,6 +48,8 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
|
|||
import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@@ -366,6 +369,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr

            }
            return DocValueFormat.GEOHASH;
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: [" + name() + "]");
        }
    }

    protected FieldMapper latMapper;
@ -23,6 +23,7 @@ import org.locationtech.spatial4j.shape.Shape;
|
|||
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
|
||||
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
|
||||
import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy;
|
||||
|
@ -46,6 +47,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@@ -412,6 +415,10 @@ public class GeoShapeFieldMapper extends FieldMapper {

            throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]");
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead");
        }
    }

    protected Explicit<Boolean> coerce;
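For geo point and geo shape fields, the replacement for an exact `term` lookup is one of the dedicated geo queries, for example a distance filter. A sketch with an invented field name and coordinates, assuming the standard geo distance builder:

```java
import org.elasticsearch.index.query.QueryBuilders;

public class GeoQueryExample {
    public static void main(String[] args) {
        // Term queries on geo fields now throw; a geo_distance query expresses the intent.
        System.out.println(
            QueryBuilders.geoDistanceQuery("location")
                .point(48.8566, 2.3522)
                .distance("10km"));
    }
}
```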
@ -23,7 +23,6 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.lucene.all.AllEntries;
|
||||
import org.elasticsearch.common.lucene.all.AllField;
|
||||
|
@ -36,6 +35,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.StringFieldType;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
|
||||
|
@ -177,7 +177,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
static final class AllFieldType extends MappedFieldType {
|
||||
static final class AllFieldType extends StringFieldType {
|
||||
|
||||
public AllFieldType() {
|
||||
}
|
||||
|
|
|
@ -23,7 +23,6 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -32,6 +31,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -125,7 +125,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
public static final class FieldNamesFieldType extends MappedFieldType {
|
||||
public static final class FieldNamesFieldType extends TermBasedFieldType {
|
||||
|
||||
private boolean enabled = Defaults.ENABLED;
|
||||
|
||||
|
|
|
@ -41,6 +41,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
|
@ -89,7 +90,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
static final class IdFieldType extends MappedFieldType {
|
||||
static final class IdFieldType extends TermBasedFieldType {
|
||||
|
||||
public IdFieldType() {
|
||||
}
|
||||
|
@ -116,62 +117,14 @@ public class IdFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
||||
if (indexOptions() != IndexOptions.NONE || context == null) {
|
||||
return super.termQuery(value, context);
|
||||
}
|
||||
final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value);
|
||||
return new TermsQuery(UidFieldMapper.NAME, uids);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termsQuery(List values, @Nullable QueryShardContext context) {
|
||||
if (indexOptions() != IndexOptions.NONE || context == null) {
|
||||
return super.termsQuery(values, context);
|
||||
}
|
||||
return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
|
||||
if (indexOptions() != IndexOptions.NONE || context == null) {
|
||||
return super.prefixQuery(value, method, context);
|
||||
}
|
||||
Collection<String> queryTypes = context.queryTypes();
|
||||
BooleanQuery.Builder query = new BooleanQuery.Builder();
|
||||
for (String queryType : queryTypes) {
|
||||
PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
|
||||
if (method != null) {
|
||||
prefixQuery.setRewriteMethod(method);
|
||||
}
|
||||
query.add(prefixQuery, BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
return query.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
|
||||
if (indexOptions() != IndexOptions.NONE || context == null) {
|
||||
return super.regexpQuery(value, flags, maxDeterminizedStates, method, context);
|
||||
}
|
||||
Collection<String> queryTypes = context.queryTypes();
|
||||
if (queryTypes.size() == 1) {
|
||||
RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))),
|
||||
flags, maxDeterminizedStates);
|
||||
if (method != null) {
|
||||
regexpQuery.setRewriteMethod(method);
|
||||
}
|
||||
return regexpQuery;
|
||||
}
|
||||
BooleanQuery.Builder query = new BooleanQuery.Builder();
|
||||
for (String queryType : queryTypes) {
|
||||
RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates);
|
||||
if (method != null) {
|
||||
regexpQuery.setRewriteMethod(method);
|
||||
}
|
||||
query.add(regexpQuery, BooleanClause.Occur.SHOULD);
|
||||
}
|
||||
return query.build();
|
||||
}
|
||||
}
|
||||
|
||||
private IdFieldMapper(Settings indexSettings, MappedFieldType existing) {
|
||||
|
|
|
@ -130,9 +130,6 @@ public class IndexFieldMapper extends MetadataFieldMapper {
|
|||
*/
|
||||
@Override
|
||||
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
||||
if (context == null) {
|
||||
return super.termQuery(value, context);
|
||||
}
|
||||
if (isSameIndex(value, context.index().getName())) {
|
||||
return Queries.newMatchAllQuery();
|
||||
} else {
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.apache.lucene.search.TermQuery;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.loader.SettingsLoader;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -194,7 +195,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
public Query termsQuery(List values, @Nullable QueryShardContext context) {
|
||||
BytesRef[] ids = new BytesRef[values.size()];
|
||||
for (int i = 0; i < ids.length; i++) {
|
||||
ids[i] = indexedValueForSearch(values.get(i));
|
||||
ids[i] = BytesRefs.toBytesRef(values.get(i));
|
||||
}
|
||||
BooleanQuery.Builder query = new BooleanQuery.Builder();
|
||||
query.add(new DocValuesTermsQuery(name(), ids), BooleanClause.Occur.MUST);
|
||||
|
|
|
@ -19,10 +19,8 @@
|
|||
|
||||
package org.elasticsearch.index.mapper.internal;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -31,6 +29,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -107,7 +106,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
static final class RoutingFieldType extends MappedFieldType {
|
||||
static final class RoutingFieldType extends TermBasedFieldType {
|
||||
|
||||
public RoutingFieldType() {
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.StoredField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -39,6 +40,8 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -167,6 +170,11 @@ public class SourceFieldMapper extends MetadataFieldMapper {
|
|||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new QueryShardException(context, "The _source field is not searchable");
|
||||
}
|
||||
}
|
||||
|
||||
private final boolean enabled;
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.StringFieldType;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -85,7 +86,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
static final class TypeFieldType extends MappedFieldType {
|
||||
static final class TypeFieldType extends StringFieldType {
|
||||
|
||||
public TypeFieldType() {
|
||||
}
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
|
||||
|
@ -83,7 +84,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
static final class UidFieldType extends MappedFieldType {
|
||||
static final class UidFieldType extends TermBasedFieldType {
|
||||
|
||||
public UidFieldType() {
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.internal;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
@ -30,6 +31,8 @@ import org.elasticsearch.index.mapper.MapperParsingException;
|
|||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -84,6 +87,11 @@ public class VersionFieldMapper extends MetadataFieldMapper {
|
|||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new QueryShardException(context, "The _version field is not searchable");
|
||||
}
|
||||
}
|
||||
|
||||
private VersionFieldMapper(Settings indexSettings) {
|
||||
|
|
|
@ -36,7 +36,6 @@ import org.elasticsearch.common.Explicit;
|
|||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
|
@ -219,14 +218,6 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include
|
|||
return InetAddressPoint.newRangeQuery(name(), lower, upper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
failIfNotIndexed();
|
||||
InetAddress base = parse(value);
|
||||
int mask = fuzziness.asInt();
|
||||
return XInetAddressPoint.newPrefixQuery(name(), base, mask);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats.Ip stats(IndexReader reader) throws IOException {
|
||||
String field = name();
|
||||
|
|
|
@ -231,21 +231,6 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
|
|||
includeLower, includeUpper);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
|
||||
long iValue = parseValue(value);
|
||||
long iSim;
|
||||
try {
|
||||
iSim = ipToLong(fuzziness.asString());
|
||||
} catch (IllegalArgumentException e) {
|
||||
iSim = fuzziness.asLong();
|
||||
}
|
||||
return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
|
||||
iValue - iSim,
|
||||
iValue + iSim,
|
||||
true, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldStats stats(IndexReader reader) throws IOException {
|
||||
int maxDoc = reader.maxDoc();
|
||||
|
|
|
@ -39,6 +39,7 @@ import org.elasticsearch.index.mapper.core.KeywordFieldMapper;
|
|||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
@ -149,6 +150,11 @@ public class PercolatorFieldMapper extends FieldMapper {
|
|||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new QueryShardException(context, "Percolator fields are not searchable directly, use a percolate query instead");
|
||||
}
|
||||
}
|
||||
|
||||
private final boolean mapUnmappedFieldAsString;
|
||||
|
|
|
@ -231,7 +231,7 @@ public class MatchQuery {
|
|||
*/
|
||||
boolean noForcedAnalyzer = this.analyzer == null;
|
||||
if (fieldType != null && fieldType.tokenized() == false && noForcedAnalyzer) {
|
||||
return termQuery(fieldType, value);
|
||||
return blendTermQuery(new Term(fieldName, value.toString()), fieldType);
|
||||
}
|
||||
|
||||
Analyzer analyzer = getAnalyzer(fieldType);
|
||||
|
@ -265,15 +265,6 @@ public class MatchQuery {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a TermQuery-like-query for MappedFieldTypes that don't support
|
||||
* QueryBuilder which is very string-ish. Just delegates to the
|
||||
* MappedFieldType for MatchQuery but gets more complex for blended queries.
|
||||
*/
|
||||
protected Query termQuery(MappedFieldType fieldType, Object value) {
|
||||
return termQuery(fieldType, value, lenient);
|
||||
}
|
||||
|
||||
protected final Query termQuery(MappedFieldType fieldType, Object value, boolean lenient) {
|
||||
try {
|
||||
return fieldType.termQuery(value, context);
|
||||
|
@ -366,8 +357,11 @@ public class MatchQuery {
|
|||
}
|
||||
return query;
|
||||
} catch (RuntimeException e) {
|
||||
return new TermQuery(term);
|
||||
// See long comment below about why we're lenient here.
|
||||
if (lenient) {
|
||||
return new TermQuery(term);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
int edits = fuzziness.asDistance(term.text());
|
||||
|
@@ -376,23 +370,7 @@
            return query;
        }
        if (fieldType != null) {
            /*
             * Its a bit weird to default to lenient here but its the backwards
             * compatible. It makes some sense when you think about what we are
             * doing here: at this point the user has forced an analyzer and
             * passed some string to the match query. We cut it up using the
             * analyzer and then tried to cram whatever we get into the field.
             * lenient=true here means that we try the terms in the query and on
             * the off chance that they are actually valid terms then we
             * actually try them. lenient=false would mean that we blow up the
             * query if they aren't valid terms. "valid" in this context means
             * "parses properly to something of the type being queried." So "1"
             * is a valid number, etc.
             *
             * We use the text form here because we we've received the term from
             * an analyzer that cut some string into text.
             */
            Query query = termQuery(fieldType, term.bytes(), true);
            Query query = termQuery(fieldType, term.bytes(), lenient);
            if (query != null) {
                return query;
            }
@ -303,15 +303,6 @@ public class MultiMatchQuery extends MatchQuery {
|
|||
return queryBuilder.blendTerm(term, fieldType);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query termQuery(MappedFieldType fieldType, Object value) {
|
||||
if (queryBuilder == null) {
|
||||
// Can be null when the MultiMatchQuery collapses into a MatchQuery
|
||||
return super.termQuery(fieldType, value);
|
||||
}
|
||||
return queryBuilder.termQuery(fieldType, value);
|
||||
}
|
||||
|
||||
static final class FieldAndFieldType {
|
||||
final MappedFieldType fieldType;
|
||||
final float boost;
|
||||
|
|
|
@ -36,12 +36,7 @@ import static org.hamcrest.number.IsCloseTo.closeTo;
|
|||
public class FuzzinessTests extends ESTestCase {
|
||||
public void testNumerics() {
|
||||
String[] options = new String[]{"1.0", "1", "1.000000"};
|
||||
assertThat(Fuzziness.build(randomFrom(options)).asByte(), equalTo((byte) 1));
|
||||
assertThat(Fuzziness.build(randomFrom(options)).asInt(), equalTo(1));
|
||||
assertThat(Fuzziness.build(randomFrom(options)).asFloat(), equalTo(1f));
|
||||
assertThat(Fuzziness.build(randomFrom(options)).asDouble(), equalTo(1d));
|
||||
assertThat(Fuzziness.build(randomFrom(options)).asLong(), equalTo(1L));
|
||||
assertThat(Fuzziness.build(randomFrom(options)).asShort(), equalTo((short) 1));
|
||||
}
|
||||
|
||||
public void testParseFromXContent() throws IOException {
|
||||
|
@ -59,7 +54,6 @@ public class FuzzinessTests extends ESTestCase {
|
|||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER));
|
||||
Fuzziness parse = Fuzziness.parse(parser);
|
||||
assertThat(parse.asFloat(), equalTo(floatValue));
|
||||
assertThat(parse.asDouble(), closeTo(floatValue, 0.000001));
|
||||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
|
||||
}
|
||||
{
|
||||
|
@ -78,10 +72,6 @@ public class FuzzinessTests extends ESTestCase {
|
|||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
|
||||
assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING)));
|
||||
Fuzziness parse = Fuzziness.parse(parser);
|
||||
assertThat(parse.asInt(), equalTo(value.intValue()));
|
||||
assertThat((int) parse.asShort(), equalTo(value.intValue()));
|
||||
assertThat((int) parse.asByte(), equalTo(value.intValue()));
|
||||
assertThat(parse.asLong(), equalTo(value.longValue()));
|
||||
if (value.intValue() >= 1) {
|
||||
assertThat(parse.asDistance(), equalTo(Math.min(2, value.intValue())));
|
||||
}
|
||||
|
@ -118,35 +108,12 @@ public class FuzzinessTests extends ESTestCase {
|
|||
assertThat(parse, sameInstance(Fuzziness.AUTO));
|
||||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
|
||||
}
|
||||
|
||||
{
|
||||
String[] values = new String[]{"d", "H", "ms", "s", "S", "w"};
|
||||
String actual = randomIntBetween(1, 3) + randomFrom(values);
|
||||
XContent xcontent = XContentType.JSON.xContent();
|
||||
String json = jsonBuilder().startObject()
|
||||
.field(Fuzziness.X_FIELD_NAME, actual)
|
||||
.endObject().string();
|
||||
XContentParser parser = xcontent.createParser(json);
|
||||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
|
||||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
|
||||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING));
|
||||
Fuzziness parse = Fuzziness.parse(parser);
|
||||
assertThat(parse.asTimeValue(), equalTo(TimeValue.parseTimeValue(actual, null, "fuzziness")));
|
||||
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void testAuto() {
|
||||
assertThat(Fuzziness.AUTO.asByte(), equalTo((byte) 1));
|
||||
assertThat(Fuzziness.AUTO.asInt(), equalTo(1));
|
||||
assertThat(Fuzziness.AUTO.asFloat(), equalTo(1f));
|
||||
assertThat(Fuzziness.AUTO.asDouble(), equalTo(1d));
|
||||
assertThat(Fuzziness.AUTO.asLong(), equalTo(1L));
|
||||
assertThat(Fuzziness.AUTO.asShort(), equalTo((short) 1));
|
||||
assertThat(Fuzziness.AUTO.asTimeValue(), equalTo(TimeValue.parseTimeValue("1ms", TimeValue.timeValueMillis(1), "fuzziness")));
|
||||
|
||||
}
|
||||
|
||||
public void testAsDistance() {
|
||||
|
@ -172,7 +139,7 @@ public class FuzzinessTests extends ESTestCase {
|
|||
Fuzziness fuzziness = Fuzziness.AUTO;
|
||||
Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness);
|
||||
assertEquals(fuzziness, deserializedFuzziness);
|
||||
assertEquals(fuzziness.asInt(), deserializedFuzziness.asInt());
|
||||
assertEquals(fuzziness.asFloat(), deserializedFuzziness.asFloat(), 0f);
|
||||
}
|
||||
|
||||
private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException {
|
||||
|
|
|
@ -66,7 +66,7 @@ public class DocumentFieldMapperTests extends LuceneTestCase {
|
|||
|
||||
}
|
||||
|
||||
static class FakeFieldType extends MappedFieldType {
|
||||
static class FakeFieldType extends TermBasedFieldType {
|
||||
|
||||
public FakeFieldType() {
|
||||
super();
|
||||
|
|
|
@ -205,7 +205,7 @@ public class FieldTypeLookupTests extends ESTestCase {
|
|||
return count;
|
||||
}
|
||||
|
||||
static class OtherFakeFieldType extends MappedFieldType {
|
||||
static class OtherFakeFieldType extends TermBasedFieldType {
|
||||
public OtherFakeFieldType() {
|
||||
}
|
||||
|
||||
|
|
|
@ -273,7 +273,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
|
|||
fieldType.checkCompatibility(fieldType, conflicts, random().nextBoolean()); // no exception
|
||||
assertTrue(conflicts.toString(), conflicts.isEmpty());
|
||||
|
||||
MappedFieldType bogus = new MappedFieldType() {
|
||||
MappedFieldType bogus = new TermBasedFieldType() {
|
||||
@Override
|
||||
public MappedFieldType clone() {return null;}
|
||||
@Override
|
||||
|
@ -287,7 +287,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
|
|||
}
|
||||
assertTrue(conflicts.toString(), conflicts.isEmpty());
|
||||
|
||||
MappedFieldType other = new MappedFieldType() {
|
||||
MappedFieldType other = new TermBasedFieldType() {
|
||||
@Override
|
||||
public MappedFieldType clone() {return null;}
|
||||
@Override
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.externalvalues;
|
|||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Iterators;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
|
@ -33,9 +32,9 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
|
@ -50,7 +49,6 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
|
||||
|
||||
/**
|
||||
* This mapper add a new sub fields
|
||||
|
@ -129,7 +127,7 @@ public class ExternalMapper extends FieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
static class ExternalFieldType extends MappedFieldType {
|
||||
static class ExternalFieldType extends TermBasedFieldType {
|
||||
|
||||
public ExternalFieldType() {}
|
||||
|
||||
|
|
|
@ -22,12 +22,8 @@ package org.elasticsearch.index.mapper.externalvalues;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.SortedSetDocValuesField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.RegexpQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -36,8 +32,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.StringFieldType;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -93,7 +89,7 @@ public class FakeStringFieldMapper extends FieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
public static final class FakeStringFieldType extends MappedFieldType {
|
||||
public static final class FakeStringFieldType extends StringFieldType {
|
||||
|
||||
|
||||
public FakeStringFieldType() {
|
||||
|
@ -119,16 +115,6 @@ public class FakeStringFieldMapper extends FieldMapper {
|
|||
}
|
||||
return termQuery(nullValue(), null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method,
|
||||
@Nullable QueryShardContext context) {
|
||||
RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), flags, maxDeterminizedStates);
|
||||
if (method != null) {
|
||||
query.setRewriteMethod(method);
|
||||
}
|
||||
return query;
|
||||
}
|
||||
}
|
||||
|
||||
protected FakeStringFieldMapper(String simpleName, FakeStringFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MapperService;
|
|||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.TermBasedFieldType;
|
||||
import org.elasticsearch.indices.IndicesModule;
|
||||
import org.elasticsearch.indices.mapper.MapperRegistry;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
@ -177,7 +178,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase {
|
|||
|
||||
}
|
||||
|
||||
private static class DummyFieldType extends MappedFieldType {
|
||||
private static class DummyFieldType extends TermBasedFieldType {
|
||||
|
||||
public DummyFieldType() {
|
||||
super();
|
||||
|
|
|
@ -26,4 +26,12 @@ public class IdFieldTypeTests extends FieldTypeTestCase {
|
|||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new IdFieldMapper.IdFieldType();
|
||||
}
|
||||
|
||||
public void testRangeQuery() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("_id");
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
||||
assertEquals("Field [_id] of type [_id] does not support range queries", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,4 +26,12 @@ public class UidFieldTypeTests extends FieldTypeTestCase {
|
|||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new UidFieldMapper.UidFieldType();
|
||||
}
|
||||
|
||||
public void testRangeQuery() {
|
||||
MappedFieldType ft = createDefaultFieldType();
|
||||
ft.setName("_uid");
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
|
||||
assertEquals("Field [_uid] of type [_uid] does not support range queries", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,8 +41,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
|
|||
|
||||
@Override
|
||||
protected FuzzyQueryBuilder doCreateTestQueryBuilder() {
|
||||
Tuple<String, Object> fieldAndValue = getRandomFieldNameAndValue();
|
||||
FuzzyQueryBuilder query = new FuzzyQueryBuilder(fieldAndValue.v1(), fieldAndValue.v2());
|
||||
FuzzyQueryBuilder query = new FuzzyQueryBuilder(STRING_FIELD_NAME, getRandomValueForFieldName(STRING_FIELD_NAME));
|
||||
if (randomBoolean()) {
|
||||
query.fuzziness(randomFuzziness(query.fieldName()));
|
||||
}
|
||||
|
@ -63,11 +62,7 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
|
|||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(FuzzyQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
if (isNumericFieldName(queryBuilder.fieldName()) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
|
||||
assertThat(query, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
} else {
|
||||
assertThat(query, instanceOf(FuzzyQuery.class));
|
||||
}
|
||||
assertThat(query, instanceOf(FuzzyQuery.class));
|
||||
}
|
||||
|
||||
public void testIllegalArguments() {
|
||||
|
@ -142,14 +137,10 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
|
|||
" }\n" +
|
||||
" }\n" +
|
||||
"}\n";
|
||||
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
|
||||
Query expected;
|
||||
if (getIndexVersionCreated().onOrAfter(Version.V_5_0_0_alpha2)) {
|
||||
expected = IntPoint.newRangeQuery(INT_FIELD_NAME, 7, 17);
|
||||
} else {
|
||||
expected = LegacyNumericRangeQuery.newIntRange(INT_FIELD_NAME, 7, 17, true, true);
|
||||
}
|
||||
assertEquals(expected, parsedQuery);
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> parseQuery(query).toQuery(createShardContext()));
|
||||
assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
}
|
||||
|
||||
public void testFromJson() throws IOException {
|
||||
|
@ -169,6 +160,6 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
|
|||
FuzzyQueryBuilder parsed = (FuzzyQueryBuilder) parseQuery(json);
|
||||
checkGeneratedJson(json, parsed);
|
||||
assertEquals(json, 42.0, parsed.boost(), 0.00001);
|
||||
assertEquals(json, 2, parsed.fuzziness().asInt());
|
||||
assertEquals(json, 2, parsed.fuzziness().asFloat(), 0f);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,13 +33,11 @@ import org.elasticsearch.common.ParseFieldMatcher;
|
|||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
import org.elasticsearch.index.search.MatchQuery.Type;
|
||||
import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
|
@ -74,10 +72,15 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||
matchQuery.operator(randomFrom(Operator.values()));
|
||||
|
||||
if (randomBoolean()) {
|
||||
matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
|
||||
if (fieldName.equals(DATE_FIELD_NAME)) {
|
||||
// tokenized dates would trigger parse errors
|
||||
matchQuery.analyzer(randomFrom("keyword", "whitespace"));
|
||||
} else {
|
||||
matchQuery.analyzer(randomFrom("simple", "keyword", "whitespace"));
|
||||
}
|
||||
}
|
||||
|
||||
if (fieldName.equals(BOOLEAN_FIELD_NAME) == false && randomBoolean()) {
|
||||
if (fieldName.equals(STRING_FIELD_NAME) && randomBoolean()) {
|
||||
matchQuery.fuzziness(randomFuzziness(fieldName));
|
||||
}
|
||||
|
||||
|
@ -128,17 +131,17 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||
case BOOLEAN:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(MatchNoDocsQuery.class))
|
||||
.or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
.or(instanceOf(PointRangeQuery.class)));
|
||||
break;
|
||||
case PHRASE:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
|
||||
.or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
.or(instanceOf(PointRangeQuery.class)));
|
||||
break;
|
||||
case PHRASE_PREFIX:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
|
||||
.or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
.or(instanceOf(PointRangeQuery.class)));
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -193,32 +196,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||
assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
|
||||
}
|
||||
|
||||
if (query instanceof LegacyNumericRangeQuery) {
|
||||
// These are fuzzy numeric queries
|
||||
assertTrue(queryBuilder.fuzziness() != null);
|
||||
@SuppressWarnings("unchecked")
|
||||
LegacyNumericRangeQuery<Number> numericRangeQuery = (LegacyNumericRangeQuery<Number>) query;
|
||||
assertTrue(numericRangeQuery.includesMin());
|
||||
assertTrue(numericRangeQuery.includesMax());
|
||||
|
||||
double value;
|
||||
double width;
|
||||
if (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false) {
|
||||
value = Double.parseDouble(queryBuilder.value().toString());
|
||||
if (queryBuilder.fuzziness().equals(Fuzziness.AUTO)) {
|
||||
width = 1;
|
||||
} else {
|
||||
width = queryBuilder.fuzziness().asDouble();
|
||||
}
|
||||
} else {
|
||||
value = ISODateTimeFormat.dateTimeParser().parseMillis(queryBuilder.value().toString());
|
||||
width = queryBuilder.fuzziness().asTimeValue().getMillis();
|
||||
}
|
||||
|
||||
assertEquals(value - width, numericRangeQuery.getMin().doubleValue(), width * .1);
|
||||
assertEquals(value + width, numericRangeQuery.getMax().doubleValue(), width * .1);
|
||||
}
|
||||
|
||||
if (query instanceof PointRangeQuery) {
|
||||
// TODO
|
||||
}
|
||||
|
@ -414,4 +391,37 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
|||
containsString("Deprecated field [" + type + "] used, expected [match] instead"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testFuzzinessOnNonStringField() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42);
|
||||
query.fuzziness(randomFuzziness(INT_FIELD_NAME));
|
||||
QueryShardContext context = createShardContext();
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
query.analyzer("keyword"); // triggers a different code path
|
||||
e = expectThrows(IllegalArgumentException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
|
||||
query.lenient(true);
|
||||
query.toQuery(context); // no exception
|
||||
query.analyzer(null);
|
||||
query.toQuery(context); // no exception
|
||||
}
|
||||
|
||||
public void testExactOnUnsupportedField() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
MatchQueryBuilder query = new MatchQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
|
||||
e.getMessage());
|
||||
query.lenient(true);
|
||||
query.toQuery(context); // no exception
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,6 +45,7 @@ import java.util.Map;
|
|||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.either;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
|
@ -77,12 +78,17 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
|
|||
query.operator(randomFrom(Operator.values()));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
query.analyzer(randomAnalyzer());
|
||||
if (fieldName.equals(DATE_FIELD_NAME)) {
|
||||
// tokenized dates would trigger parse errors
|
||||
query.analyzer("keyword");
|
||||
} else {
|
||||
query.analyzer(randomAnalyzer());
|
||||
}
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
query.slop(randomIntBetween(0, 5));
|
||||
}
|
||||
if (randomBoolean() && (query.type() == Type.BEST_FIELDS || query.type() == Type.MOST_FIELDS)) {
|
||||
if (fieldName.equals(STRING_FIELD_NAME) && randomBoolean() && (query.type() == Type.BEST_FIELDS || query.type() == Type.MOST_FIELDS)) {
|
||||
query.fuzziness(randomFuzziness(fieldName));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
|
@ -291,4 +297,23 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
|
|||
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
|
||||
assertEquals("[multi_match] unknown token [START_ARRAY] after [query]", e.getMessage());
|
||||
}
|
||||
|
||||
public void testFuzzinessOnNonStringField() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
MultiMatchQueryBuilder query = new MultiMatchQueryBuilder(42).field(INT_FIELD_NAME).field(BOOLEAN_FIELD_NAME);
|
||||
query.fuzziness(randomFuzziness(INT_FIELD_NAME));
|
||||
QueryShardContext context = createShardContext();
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertThat(e.getMessage(), containsString("Can only use fuzzy queries on keyword and text fields"));
|
||||
query.analyzer("keyword"); // triggers a different code path
|
||||
e = expectThrows(IllegalArgumentException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertThat(e.getMessage(), containsString("Can only use fuzzy queries on keyword and text fields"));
|
||||
|
||||
query.lenient(true);
|
||||
query.toQuery(context); // no exception
|
||||
query.analyzer(null);
|
||||
query.toQuery(context); // no exception
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,6 +28,7 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
|
@ -93,4 +94,14 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
|
|||
assertEquals(json, 2.0, parsed.boost(), 0.00001);
|
||||
assertEquals(json, "user", parsed.fieldName());
|
||||
}
|
||||
|
||||
public void testNumeric() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
PrefixQueryBuilder query = prefixQuery(INT_FIELD_NAME, "12*");
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Can only use prefix queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,7 +38,6 @@ import org.apache.lucene.search.SynonymQuery;
|
|||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.lucene.all.AllTermQuery;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.hamcrest.Matchers;
|
||||
|
@ -393,16 +392,40 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
}
|
||||
}
|
||||
|
||||
public void testToQueryNumericRangeQuery() throws Exception {
|
||||
public void testFuzzyNumeric() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext());
|
||||
if (getIndexVersionCreated().onOrAfter(Version.V_5_0_0_alpha2)) {
|
||||
assertEquals(IntPoint.newExactQuery(INT_FIELD_NAME, 12), query);
|
||||
} else {
|
||||
LegacyNumericRangeQuery fuzzyQuery = (LegacyNumericRangeQuery) query;
|
||||
assertThat(fuzzyQuery.getMin().longValue(), equalTo(12L));
|
||||
assertThat(fuzzyQuery.getMax().longValue(), equalTo(12L));
|
||||
}
|
||||
QueryStringQueryBuilder query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME);
|
||||
QueryShardContext context = createShardContext();
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Can only use fuzzy queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
query.lenient(true);
|
||||
query.toQuery(context); // no exception
|
||||
}
|
||||
|
||||
public void testPrefixNumeric() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
QueryStringQueryBuilder query = queryStringQuery("12*").defaultField(INT_FIELD_NAME);
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Can only use prefix queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
query.lenient(true);
|
||||
query.toQuery(context); // no exception
|
||||
}
|
||||
|
||||
public void testExactGeo() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
QueryStringQueryBuilder query = queryStringQuery("2,3").defaultField(GEO_POINT_FIELD_NAME);
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
|
||||
e.getMessage());
|
||||
query.lenient(true);
|
||||
query.toQuery(context); // no exception
|
||||
}
|
||||
|
||||
public void testTimezone() throws Exception {
|
||||
|
@ -518,4 +541,5 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
|
|||
assertEquals(json, "this AND that OR thus", parsed.queryString());
|
||||
assertEquals(json, "content", parsed.defaultField());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -26,6 +26,7 @@ import java.io.IOException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
||||
|
@ -101,4 +102,14 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
|
|||
assertEquals(json, "s.*y", parsed.value());
|
||||
assertEquals(json, 20000, parsed.maxDeterminizedStates());
|
||||
}
|
||||
|
||||
public void testNumeric() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
RegexpQueryBuilder query = new RegexpQueryBuilder(INT_FIELD_NAME, "12");
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Can only use regexp queries on keyword and text fields - not on [mapped_int] which is of type [integer]",
|
||||
e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -139,4 +139,14 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu
|
|||
|
||||
assertEquals(json, "Quick Foxes!", parsed.value());
|
||||
}
|
||||
|
||||
public void testGeo() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
TermQueryBuilder query = new TermQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
|
||||
e.getMessage());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,6 +45,7 @@ import java.util.Arrays;
|
|||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
|
@ -74,7 +75,10 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
|
|||
// terms query or lookup query
|
||||
if (randomBoolean()) {
|
||||
// make between 0 and 5 different values of the same type
|
||||
String fieldName = getRandomFieldName();
|
||||
String fieldName;
|
||||
do {
|
||||
fieldName = getRandomFieldName();
|
||||
} while (fieldName.equals(GEO_POINT_FIELD_NAME) || fieldName.equals(GEO_SHAPE_FIELD_NAME));
|
||||
Object[] values = new Object[randomInt(5)];
|
||||
for (int i = 0; i < values.length; i++) {
|
||||
values[i] = getRandomValueForFieldName(fieldName);
|
||||
|
@ -314,5 +318,15 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
|
|||
assertEquals(termsQueryBuilder.rewrite(createShardContext()), new TermsQueryBuilder(STRING_FIELD_NAME,
|
||||
randomTerms.stream().filter(x -> x != null).collect(Collectors.toList()))); // terms lookup removes null values
|
||||
}
|
||||
|
||||
public void testGeo() throws Exception {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
TermsQueryBuilder query = new TermsQueryBuilder(GEO_POINT_FIELD_NAME, "2,3");
|
||||
QueryShardContext context = createShardContext();
|
||||
QueryShardException e = expectThrows(QueryShardException.class,
|
||||
() -> query.toQuery(context));
|
||||
assertEquals("Geo fields do not support exact searching, use dedicated geo queries instead: [mapped_geo_point]",
|
||||
e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1003,14 +1003,6 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertNoFailures(searchResponse);
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertFirstHit(searchResponse, hasId("1"));
|
||||
|
||||
searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:11~1")).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertFirstHit(searchResponse, hasId("1"));
|
||||
|
||||
searchResponse = client().prepareSearch().setQuery(queryStringQuery("date:2012-02-02~1d")).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertFirstHit(searchResponse, hasId("1"));
|
||||
}
|
||||
|
||||
public void testQuotedQueryStringWithBoost() throws InterruptedException, ExecutionException {
|
||||
|
|
|
@ -192,13 +192,6 @@ public class SimpleSearchIT extends ESIntegTestCase {
|
|||
|
||||
searchResponse = client().prepareSearch().setQuery(QueryBuilders.queryStringQuery("_id:XXX1")).execute().actionGet();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
|
||||
// id is not index, but we can automatically support prefix as well
|
||||
searchResponse = client().prepareSearch().setQuery(QueryBuilders.prefixQuery("_id", "XXX")).execute().actionGet();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
|
||||
searchResponse = client().prepareSearch().setQuery(QueryBuilders.queryStringQuery("_id:XXX*").lowercaseExpandedTerms(false)).execute().actionGet();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
}
|
||||
|
||||
public void testSimpleDateRange() throws Exception {
|
||||
|
@ -421,6 +414,7 @@ public class SimpleSearchIT extends ESIntegTestCase {
|
|||
client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)).get(),
|
||||
1);
|
||||
}
|
||||
|
||||
public void testQueryNumericFieldWithRegex() throws Exception {
|
||||
assertAcked(prepareCreate("idx").addMapping("type", "num", "type=integer"));
|
||||
ensureGreen("idx");
|
||||
|
@ -429,7 +423,7 @@ public class SimpleSearchIT extends ESIntegTestCase {
|
|||
client().prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", "34")).get();
|
||||
fail("SearchPhaseExecutionException should have been thrown");
|
||||
} catch (SearchPhaseExecutionException ex) {
|
||||
assertThat(ex.getCause().getCause().getMessage(), containsString("Can only use regular expression on keyword and text fields"));
|
||||
assertThat(ex.getCause().getCause().getMessage(), containsString("Can only use regexp queries on keyword and text fields"));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -20,6 +20,8 @@
|
|||
package org.elasticsearch.search.sort;
|
||||
|
||||
|
||||
import org.apache.lucene.queryparser.xml.builders.MatchAllDocsQueryBuilder;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
|
@ -34,6 +36,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.query.GeoValidationMethod;
|
||||
import org.elasticsearch.index.query.MatchAllQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.search.DocValueFormat;
|
||||
|
@ -90,7 +93,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
|
|||
result.sortMode(randomValueOtherThan(SortMode.SUM, () -> randomFrom(SortMode.values())));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
result.setNestedFilter(randomNestedFilter());
|
||||
result.setNestedFilter(new MatchAllQueryBuilder());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
result.setNestedPath(
|
||||
|
|
|
@@ -428,27 +428,9 @@ precision is interpreted as the length of the geohash.
=== Fuzziness

Some queries and APIs support parameters to allow inexact _fuzzy_ matching,
using the `fuzziness` parameter. The `fuzziness` parameter is context
sensitive which means that it depends on the type of the field being queried:
using the `fuzziness` parameter.

[float]
==== Numeric, date and IPv4 fields

When querying numeric, date and IPv4 fields, `fuzziness` is interpreted as a
`+/-` margin. It behaves like a <<query-dsl-range-query>> where:

    -fuzziness <= field value <= +fuzziness

The `fuzziness` parameter should be set to a numeric value, eg `2` or `2.0`. A
`date` field interprets a long as milliseconds, but also accepts a string
containing a time value -- `"1h"` -- as explained in <<time-units>>. An `ip`
field accepts a long or another IPv4 address (which will be converted into a
long).

[float]
==== String fields

When querying `string` fields, `fuzziness` is interpreted as a
When querying `text` or `keyword` fields, `fuzziness` is interpreted as a
http://en.wikipedia.org/wiki/Levenshtein_distance[Levenshtein Edit Distance]
-- the number of one character changes that need to be made to one string to
make it the same as another string.
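To make the remaining behaviour concrete, here is a minimal sketch of a `match` query that passes `fuzziness` against a `text` field; the field name `title` and the query text are hypothetical and not taken from this change:

[source,js]
--------------------------------------------------
{
    "match" : {
        "title" : {
            "query" : "quick brown fx",
            "fuzziness" : 2
        }
    }
}
--------------------------------------------------

The same `fuzziness` value sent against a numeric, date or ip field is now rejected instead of being rewritten into a range query.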
@@ -91,6 +91,16 @@ The following deprecated queries have been removed:

==== Changes to queries

* Unsupported queries such as term queries on `geo_point` fields will now fail
  rather than returning no hits.

* Removed support for fuzzy queries on numeric, date and ip fields, use range
  queries instead.

* Removed support for range and prefix queries on `_uid` and `_id` fields.

* Querying an unindexed field will now fail rather than returning no hits.

* Removed support for the deprecated `min_similarity` parameter in `fuzzy
  query`, in favour of `fuzziness`.
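As a hedged sketch of the migration path for the fuzzy-query bullet above: the removed behaviour turned a fuzzy query with `value: 12` and `fuzziness: 2` on a numeric field into a range query between 10 and 14 (see the removed documentation example further below); the same bounds are now written as an explicit `range` query. The field name `price` is only illustrative:

[source,js]
--------------------------------------------------
{
    "range" : {
        "price" : {
            "gte" : 10,
            "lte" : 14
        }
    }
}
--------------------------------------------------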
@@ -1,10 +1,9 @@
[[query-dsl-fuzzy-query]]
=== Fuzzy Query

deprecated[5.0.0, Will be removed without a replacement for `string` fields. Note that the `fuzziness` parameter is still supported for match queries and in suggesters. Use range queries for `date` and `numeric` fields instead.]
deprecated[5.0.0, Will be removed in 6.0. Use match queries with fuzziness instead]

The fuzzy query uses similarity based on Levenshtein edit distance for
`string` fields, and a `+/-` margin on numeric and date fields.
The fuzzy query uses similarity based on Levenshtein edit distance.

==== String fields

@@ -63,41 +62,3 @@ WARNING: This query can be very heavy if `prefix_length` is set to `0` and if
`max_expansions` is set to a high number. It could result in every term in the
index being examined!

[float]
==== Numeric and date fields

Performs a <<query-dsl-range-query>> ``around'' the value using the
`fuzziness` value as a `+/-` range, where:

    -fuzziness <= field value <= +fuzziness

For example:

[source,js]
--------------------------------------------------
{
    "fuzzy" : {
        "price" : {
            "value" : 12,
            "fuzziness" : 2
        }
    }
}
--------------------------------------------------

Will result in a range query between 10 and 14. Date fields support
<<time-units,time values>>, eg:

[source,js]
--------------------------------------------------
{
    "fuzzy" : {
        "created" : {
            "value" : "2010-02-05T12:05:07",
            "fuzziness" : "1d"
        }
    }
}
--------------------------------------------------

See <<fuzziness>> for more details about accepted values.
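For completeness, a small sketch of the escape hatch exercised by the tests in this change (`lenient(true)` followed by `toQuery` with no exception): setting `lenient` to `true` on a `match` query lets an otherwise unsupported combination, such as `fuzziness` on an integer field, parse instead of throwing the "Can only use fuzzy queries on keyword and text fields" error asserted above. The field name `num_field` is hypothetical:

[source,js]
--------------------------------------------------
{
    "match" : {
        "num_field" : {
            "query" : 12,
            "fuzziness" : 2,
            "lenient" : true
        }
    }
}
--------------------------------------------------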
@ -21,6 +21,7 @@ package org.elasticsearch.mapper.attachments;
|
|||
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.tika.language.LanguageIdentifier;
|
||||
import org.apache.tika.metadata.Metadata;
|
||||
import org.elasticsearch.Version;
|
||||
|
@ -42,6 +43,8 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
|||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper.NumberType;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
@ -118,6 +121,11 @@ public class AttachmentMapper extends FieldMapper {
|
|||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new QueryShardException(context, "Attachment fields are not searchable: [" + name() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder extends FieldMapper.Builder<Builder, AttachmentMapper> {
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.document.SortedNumericDocValuesField;
|
||||
import org.apache.lucene.document.StoredField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.hash.MurmurHash3;
|
||||
|
@ -40,6 +41,8 @@ import org.elasticsearch.index.mapper.Mapper;
|
|||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.TypeParsers;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
public class Murmur3FieldMapper extends FieldMapper {
|
||||
|
||||
|
@ -123,6 +126,11 @@ public class Murmur3FieldMapper extends FieldMapper {
|
|||
failIfNoDocValues();
|
||||
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new QueryShardException(context, "Murmur3 fields are not searchable: [" + name() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
protected Murmur3FieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
|
|
|
@ -49,7 +49,7 @@ public class MockFieldMapper extends FieldMapper {
|
|||
return fullName.substring(ndx + 1);
|
||||
}
|
||||
|
||||
public static class FakeFieldType extends MappedFieldType {
|
||||
public static class FakeFieldType extends TermBasedFieldType {
|
||||
public FakeFieldType() {
|
||||
}