Merge branch 'master' into feature/query-refactoring
Conflicts:
	core/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/TermQueryParser.java
commit 5f66f68135
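In essence, the patch below is a mechanical refactoring: query-building and value-conversion calls that used to go through FieldMapper (often via mapper.fieldType()) are made directly on MappedFieldType, and callers hold the field type instead of the mapper. The following is a minimal, hypothetical sketch of that calling-pattern change only; the interfaces and all names in it are stand-ins invented for illustration, not the real org.elasticsearch.index.mapper classes.

// Hypothetical stand-ins mirroring the shape of the refactoring; not the real
// Elasticsearch classes. Self-contained: compiles and runs on its own.
import java.util.Objects;

interface MappedFieldTypeLike {
    String indexName();             // plays the role of fieldType.names().indexName()
    String termQuery(String value); // plays the role of fieldType.termQuery(value, context)
}

interface FieldMapperLike {
    MappedFieldTypeLike fieldType(); // old callers reached the type through the mapper
}

public class QueryRefactoringSketch {

    // Before: helpers took the mapper and delegated through it.
    static String buildQueryOld(FieldMapperLike mapper, String text) {
        Objects.requireNonNull(mapper, "mapper");
        return mapper.fieldType().termQuery(text);
    }

    // After: callers hold the MappedFieldType directly and call it.
    static String buildQueryNew(MappedFieldTypeLike fieldType, String text) {
        Objects.requireNonNull(fieldType, "fieldType");
        return fieldType.termQuery(text);
    }

    public static void main(String[] args) {
        MappedFieldTypeLike type = new MappedFieldTypeLike() {
            public String indexName() { return "title"; }
            public String termQuery(String value) { return indexName() + ":" + value; }
        };
        FieldMapperLike mapper = () -> type;
        // Both produce the same query string; only the entry point changes.
        System.out.println(buildQueryOld(mapper, "foo")); // title:foo
        System.out.println(buildQueryNew(type, "foo"));   // title:foo
    }
}

The same shape shows up throughout the diff, for example MapperQueryParser switching its currentMapper field to currentFieldType, and MapperService returning MappedFieldType from fullName and indexName.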
@@ -19,15 +19,12 @@
 
 package org.apache.lucene.queries;
 
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
-import org.apache.lucene.search.*;
 import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.Query;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.index.mapper.FieldMapper;
-
-import java.io.IOException;
+import org.elasticsearch.index.mapper.MappedFieldType;
 
 /**
  * Extended version of {@link CommonTermsQuery} that allows to pass in a
@@ -36,11 +33,11 @@ import java.io.IOException;
  */
 public class ExtendedCommonTermsQuery extends CommonTermsQuery {
 
-private final FieldMapper mapper;
+private final MappedFieldType fieldType;
 
-public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, boolean disableCoord, FieldMapper mapper) {
+public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, boolean disableCoord, MappedFieldType fieldType) {
 super(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord);
-this.mapper = mapper;
+this.fieldType = fieldType;
 }
 
 private String lowFreqMinNumShouldMatchSpec;
@@ -81,10 +78,10 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery {
 
 @Override
 protected Query newTermQuery(Term term, TermContext context) {
-if (mapper == null) {
+if (fieldType == null) {
 return super.newTermQuery(term, context);
 }
-final Query query = mapper.queryStringTermQuery(term);
+final Query query = fieldType.queryStringTermQuery(term);
 if (query == null) {
 return super.newTermQuery(term, context);
 } else {
@@ -21,7 +21,6 @@ package org.apache.lucene.queryparser.classic;
 
-import com.google.common.base.Objects;
 import com.google.common.collect.ImmutableMap;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -37,7 +36,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.automaton.RegExp;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.unit.Fuzziness;
-import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;
@@ -77,7 +76,7 @@ public class MapperQueryParser extends QueryParser {
 private boolean forcedAnalyzer;
 private boolean forcedQuoteAnalyzer;
 
-private FieldMapper currentMapper;
+private MappedFieldType currentFieldType;
 
 private boolean analyzeWildcard;
 
@@ -148,8 +147,8 @@ public class MapperQueryParser extends QueryParser {
 
 @Override
 protected Query newTermQuery(Term term) {
-if (currentMapper != null) {
-Query termQuery = currentMapper.queryStringTermQuery(term);
+if (currentFieldType != null) {
+Query termQuery = currentFieldType.queryStringTermQuery(term);
 if (termQuery != null) {
 return termQuery;
 }
@@ -224,33 +223,33 @@ public class MapperQueryParser extends QueryParser {
 return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
 }
 }
-currentMapper = null;
+currentFieldType = null;
 Analyzer oldAnalyzer = getAnalyzer();
 try {
 if (quoted) {
 setAnalyzer(quoteAnalyzer);
 if (quoteFieldSuffix != null) {
-currentMapper = parseContext.fieldMapper(field + quoteFieldSuffix);
+currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix);
 }
 }
-if (currentMapper == null) {
-currentMapper = parseContext.fieldMapper(field);
+if (currentFieldType == null) {
+currentFieldType = parseContext.fieldMapper(field);
 }
-if (currentMapper != null) {
+if (currentFieldType != null) {
 if (quoted) {
 if (!forcedQuoteAnalyzer) {
-setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentMapper));
+setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType));
 }
 } else {
 if (!forcedAnalyzer) {
-setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
+setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
 }
 }
-if (currentMapper != null) {
+if (currentFieldType != null) {
 Query query = null;
-if (currentMapper.useTermQueryWithQueryString()) {
+if (currentFieldType.useTermQueryWithQueryString()) {
 try {
-query = currentMapper.termQuery(queryText, parseContext);
+query = currentFieldType.termQuery(queryText, parseContext);
 } catch (RuntimeException e) {
 if (settings.lenient()) {
 return null;
@@ -260,7 +259,7 @@ public class MapperQueryParser extends QueryParser {
 }
 }
 if (query == null) {
-query = super.getFieldQuery(currentMapper.fieldType().names().indexName(), queryText, quoted);
+query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted);
 }
 return query;
 }
@@ -361,20 +360,20 @@ public class MapperQueryParser extends QueryParser {
 }
 
 private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
-currentMapper = parseContext.fieldMapper(field);
-if (currentMapper != null) {
-if (lowercaseExpandedTerms && !currentMapper.isNumeric()) {
+currentFieldType = parseContext.fieldMapper(field);
+if (currentFieldType != null) {
+if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
 part1 = part1 == null ? null : part1.toLowerCase(locale);
 part2 = part2 == null ? null : part2.toLowerCase(locale);
 }
 
 try {
 Query rangeQuery;
-if (currentMapper instanceof DateFieldMapper && settings.timeZone() != null) {
-DateFieldMapper dateFieldMapper = (DateFieldMapper) this.currentMapper;
-rangeQuery = dateFieldMapper.fieldType().rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext);
+if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
+DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
+rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext);
 } else {
-rangeQuery = currentMapper.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext);
+rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext);
 }
 return rangeQuery;
 } catch (RuntimeException e) {
@@ -426,11 +425,11 @@ public class MapperQueryParser extends QueryParser {
 }
 
 private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
-currentMapper = parseContext.fieldMapper(field);
-if (currentMapper!= null) {
+currentFieldType = parseContext.fieldMapper(field);
+if (currentFieldType != null) {
 try {
 //LUCENE 4 UPGRADE I disabled transpositions here by default - maybe this needs to be changed
-return currentMapper.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), false);
+return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), false);
 } catch (RuntimeException e) {
 if (settings.lenient()) {
 return null;
@@ -495,20 +494,20 @@ public class MapperQueryParser extends QueryParser {
 }
 
 private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {
-currentMapper = null;
+currentFieldType = null;
 Analyzer oldAnalyzer = getAnalyzer();
 try {
-currentMapper = parseContext.fieldMapper(field);
-if (currentMapper != null) {
+currentFieldType = parseContext.fieldMapper(field);
+if (currentFieldType != null) {
 if (!forcedAnalyzer) {
-setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
+setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
 }
 Query query = null;
-if (currentMapper.useTermQueryWithQueryString()) {
-query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
+if (currentFieldType.useTermQueryWithQueryString()) {
+query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
 }
 if (query == null) {
-query = getPossiblyAnalyzedPrefixQuery(currentMapper.fieldType().names().indexName(), termStr);
+query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
 }
 return query;
 }
@@ -636,15 +635,15 @@ public class MapperQueryParser extends QueryParser {
 
 private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {
 String indexedNameField = field;
-currentMapper = null;
+currentFieldType = null;
 Analyzer oldAnalyzer = getAnalyzer();
 try {
-currentMapper = parseContext.fieldMapper(field);
-if (currentMapper != null) {
+currentFieldType = parseContext.fieldMapper(field);
+if (currentFieldType != null) {
 if (!forcedAnalyzer) {
-setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
+setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
 }
-indexedNameField = currentMapper.fieldType().names().indexName();
+indexedNameField = currentFieldType.names().indexName();
 return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
 }
 return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
@@ -768,17 +767,17 @@ public class MapperQueryParser extends QueryParser {
 }
 
 private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
-currentMapper = null;
+currentFieldType = null;
 Analyzer oldAnalyzer = getAnalyzer();
 try {
-currentMapper = parseContext.fieldMapper(field);
-if (currentMapper != null) {
+currentFieldType = parseContext.fieldMapper(field);
+if (currentFieldType != null) {
 if (!forcedAnalyzer) {
-setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
+setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
 }
 Query query = null;
-if (currentMapper.useTermQueryWithQueryString()) {
-query = currentMapper.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
+if (currentFieldType.useTermQueryWithQueryString()) {
+query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
 }
 if (query == null) {
 query = super.getRegexpQuery(field, termStr);
@@ -239,7 +239,9 @@ public class Version {
 public static final int V_1_5_3_ID = 1050399;
 public static final Version V_1_5_3 = new Version(V_1_5_3_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
 public static final int V_1_6_0_ID = 1060099;
-public static final Version V_1_6_0 = new Version(V_1_6_0_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
+public static final Version V_1_6_0 = new Version(V_1_6_0_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
+public static final int V_1_6_1_ID = 1060199;
+public static final Version V_1_6_1 = new Version(V_1_6_1_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
 public static final int V_2_0_0_ID = 2000099;
 public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_0);
 
@@ -257,6 +259,8 @@ public class Version {
 switch (id) {
 case V_2_0_0_ID:
 return V_2_0_0;
+case V_1_6_1_ID:
+return V_1_6_1;
 case V_1_6_0_ID:
 return V_1_6_0;
 case V_1_5_3_ID:
@@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.analysis.*;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.shard.ShardId;
@@ -108,13 +109,13 @@ public class TransportAnalyzeAction extends TransportSingleCustomOperationAction
 if (indexService == null) {
 throw new IllegalArgumentException("No index provided, and trying to analyzer based on a specific field which requires the index parameter");
 }
-FieldMapper fieldMapper = indexService.mapperService().smartNameFieldMapper(request.field());
-if (fieldMapper != null) {
-if (fieldMapper.isNumeric()) {
+MappedFieldType fieldType = indexService.mapperService().smartNameFieldType(request.field());
+if (fieldType != null) {
+if (fieldType.isNumeric()) {
 throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
 }
-analyzer = fieldMapper.fieldType().indexAnalyzer();
-field = fieldMapper.fieldType().names().indexName();
+analyzer = fieldType.indexAnalyzer();
+field = fieldType.names().indexName();
 
 }
 }
@@ -39,6 +39,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
@@ -135,12 +136,12 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
 shard.readAllowed();
 try (Engine.Searcher searcher = shard.acquireSearcher("fieldstats")) {
 for (String field : request.getFields()) {
-FieldMapper fieldMapper = mapperService.fullName(field);
-if (fieldMapper != null) {
+MappedFieldType fieldType = mapperService.fullName(field);
+if (fieldType != null) {
 IndexReader reader = searcher.reader();
 Terms terms = MultiFields.getTerms(reader, field);
 if (terms != null) {
-fieldStats.put(field, fieldMapper.stats(terms, reader.maxDoc()));
+fieldStats.put(field, fieldType.stats(terms, reader.maxDoc()));
 }
 } else {
 throw new IllegalArgumentException("field [" + field + "] doesn't exist");
@@ -26,6 +26,7 @@ import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
 
@@ -54,14 +55,14 @@ public class PerFieldMappingPostingFormatCodec extends Lucene50Codec {
 
 @Override
 public PostingsFormat getPostingsFormatForField(String field) {
-final FieldMapper indexName = mapperService.indexName(field);
+final MappedFieldType indexName = mapperService.indexName(field);
 if (indexName == null) {
 logger.warn("no index mapper found for field: [{}] returning default postings format", field);
-} else if (indexName instanceof CompletionFieldMapper) {
+} else if (indexName instanceof CompletionFieldMapper.CompletionFieldType) {
 // CompletionFieldMapper needs a special postings format
-final CompletionFieldMapper mapper = (CompletionFieldMapper) indexName;
+final CompletionFieldMapper.CompletionFieldType fieldType = (CompletionFieldMapper.CompletionFieldType) indexName;
 final PostingsFormat defaultFormat = super.getPostingsFormatForField(field);
-return mapper.postingsFormat(defaultFormat);
+return fieldType.postingsFormat(defaultFormat);
 }
 return super.getPostingsFormatForField(field);
 }
@@ -229,7 +229,7 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
 
 interface Builder {
 
-IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService);
 }
 
@@ -228,13 +228,13 @@ public class IndexFieldDataService extends AbstractIndexComponent {
 }
 
 @SuppressWarnings("unchecked")
-public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper mapper) {
-final Names fieldNames = mapper.fieldType().names();
-final FieldDataType type = mapper.fieldType().fieldDataType();
+public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
+final Names fieldNames = fieldType.names();
+final FieldDataType type = fieldType.fieldDataType();
 if (type == null) {
 throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
 }
-final boolean docValues = mapper.fieldType().hasDocValues();
+final boolean docValues = fieldType.hasDocValues();
 final String key = fieldNames.indexName();
 IndexFieldData<?> fieldData = loadedFieldData.get(key);
 if (fieldData == null) {
@@ -279,7 +279,7 @@ public class IndexFieldDataService extends AbstractIndexComponent {
 fieldDataCaches.put(fieldNames.indexName(), cache);
 }
 
-fieldData = builder.build(index, indexSettings, mapper, cache, circuitBreakerService, indexService.mapperService());
+fieldData = builder.build(index, indexSettings, fieldType, cache, circuitBreakerService, indexService.mapperService());
 loadedFieldData.put(fieldNames.indexName(), fieldData);
 }
 } finally {
@@ -29,6 +29,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
@@ -64,11 +65,11 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
 // Ignore breaker
-final Names fieldNames = mapper.fieldType().names();
-return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
+final Names fieldNames = fieldType.names();
+return new BytesBinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
 }
 
 }
@@ -25,6 +25,7 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.fielddata.*;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MappedFieldType.Names;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.settings.IndexSettings;
@@ -39,10 +40,10 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicF
 
 public static class Builder implements IndexFieldData.Builder {
 @Override
-public IndexFieldData<AtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
+public IndexFieldData<AtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
 IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
 // Ignore Circuit Breaker
-return new DisabledIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache);
+return new DisabledIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache);
 }
 }
 
@@ -91,11 +91,11 @@ public abstract class DocValuesIndexFieldData {
 }
 
 @Override
-public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
 // Ignore Circuit Breaker
-final Names fieldNames = mapper.fieldType().names();
-final Settings fdSettings = mapper.fieldType().fieldDataType().getSettings();
+final Names fieldNames = fieldType.names();
+final Settings fdSettings = fieldType.fieldDataType().getSettings();
 final Map<String, Settings> filter = fdSettings.getGroups("filter");
 if (filter != null && !filter.isEmpty()) {
 throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]");
@@ -103,19 +103,19 @@ public abstract class DocValuesIndexFieldData {
 
 if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
 assert numericType == null;
-return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
+return new BinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
 } else if (NUMERIC_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
 assert !numericType.isFloatingPoint();
-return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
+return new NumericDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
 } else if (numericType != null) {
 if (Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1)) {
-return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType());
+return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
 } else {
 // prior to ES 1.4: multi-valued numerics were boxed inside a byte[] as BINARY
-return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType());
+return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
 }
 } else {
-return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldType().fieldDataType());
+return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, fieldType.fieldDataType());
 }
 }
 
@@ -73,9 +73,9 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
-return new DoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
+return new DoubleArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
 }
 }
 
@@ -47,9 +47,9 @@ public class FSTBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
+public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
 IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
-return new FSTBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
+return new FSTBytesIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
 }
 }
 
@@ -72,9 +72,9 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumer
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
-return new FloatArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
+return new FloatArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
 }
 }
 
@@ -63,11 +63,11 @@ public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData impl
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
 // Ignore breaker
-final Names fieldNames = mapper.fieldType().names();
-return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
+final Names fieldNames = fieldType.names();
+return new GeoPointBinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
 }
 
 }
@@ -53,9 +53,9 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
-FieldDataType type = mapper.fieldType().fieldDataType();
+FieldDataType type = fieldType.fieldDataType();
 final String precisionAsString = type.getSettings().get(PRECISION_KEY);
 final Distance precision;
 if (precisionAsString != null) {
@@ -63,7 +63,7 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField
 } else {
 precision = DEFAULT_PRECISION_VALUE;
 }
-return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, precision, breakerService);
+return new GeoPointCompressedIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, precision, breakerService);
 }
 }
 
@@ -47,9 +47,9 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
-return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
+return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
 }
 }
 
@@ -46,9 +46,9 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
+public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
 CircuitBreakerService breakerService, MapperService mapperService) {
-return new IndexIndexFieldData(index, mapper.fieldType().names());
+return new IndexIndexFieldData(index, fieldType.names());
 }
 
 }
@@ -85,9 +85,9 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
 }
 
 @Override
-public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
+public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
 IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
-return new PackedArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, numericType, breakerService);
+return new PackedArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, numericType, breakerService);
 }
 }
 
@@ -48,9 +48,9 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
+public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
 IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
-return new PagedBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
+return new PagedBytesIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
 }
 }
 
@@ -250,10 +250,10 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
 public static class Builder implements IndexFieldData.Builder {
 
 @Override
-public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
+public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
 IndexFieldDataCache cache, CircuitBreakerService breakerService,
 MapperService mapperService) {
-return new ParentChildIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache,
+return new ParentChildIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
 mapperService, breakerService);
 }
 }
@@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.FieldMappers;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
@@ -61,13 +62,13 @@ public abstract class FieldsVisitor extends StoredFieldVisitor {
 }
 // can't derive exact mapping type
 for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
-FieldMapper fieldMappers = mapperService.indexName(entry.getKey());
-if (fieldMappers == null) {
+MappedFieldType fieldType = mapperService.indexName(entry.getKey());
+if (fieldType == null) {
 continue;
 }
 List<Object> fieldValues = entry.getValue();
 for (int i = 0; i < fieldValues.size(); i++) {
-fieldValues.set(i, fieldMappers.valueForSearch(fieldValues.get(i)));
+fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
 }
 }
 }
@@ -80,7 +81,7 @@ public abstract class FieldsVisitor extends StoredFieldVisitor {
 }
 List<Object> fieldValues = entry.getValue();
 for (int i = 0; i < fieldValues.size(); i++) {
-fieldValues.set(i, fieldMapper.valueForSearch(fieldValues.get(i)));
+fieldValues.set(i, fieldMapper.fieldType().valueForSearch(fieldValues.get(i)));
 }
 }
 }
@@ -20,6 +20,7 @@ package org.elasticsearch.index.fieldvisitor;
 
 import org.apache.lucene.index.FieldInfo;
 import org.elasticsearch.index.mapper.FieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.internal.IdFieldMapper;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -55,7 +56,7 @@ public class SingleFieldsVisitor extends FieldsVisitor {
 super.reset();
 }
 
-public void postProcess(FieldMapper mapper) {
+public void postProcess(MappedFieldType fieldType) {
 if (uid != null) {
 switch (field) {
 case UidFieldMapper.NAME: addValue(field, uid.toString());
@@ -67,12 +68,12 @@ public class SingleFieldsVisitor extends FieldsVisitor {
 if (fieldsValues == null) {
 return;
 }
-List<Object> fieldValues = fieldsValues.get(mapper.fieldType().names().indexName());
+List<Object> fieldValues = fieldsValues.get(fieldType.names().indexName());
 if (fieldValues == null) {
 return;
 }
 for (int i = 0; i < fieldValues.size(); i++) {
-fieldValues.set(i, mapper.valueForSearch(fieldValues.get(i)));
+fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
 }
 }
 }
@@ -252,7 +252,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
 List<Object> values = searchLookup.source().extractRawValues(field);
 if (!values.isEmpty()) {
 for (int i = 0; i < values.size(); i++) {
-values.set(i, fieldMapper.valueForSearch(values.get(i)));
+values.set(i, fieldMapper.fieldType().valueForSearch(values.get(i)));
 }
 value = values;
 }
@@ -379,7 +379,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
 List<Object> values = searchLookup.source().extractRawValues(field);
 if (!values.isEmpty()) {
 for (int i = 0; i < values.size(); i++) {
-values.set(i, fieldMapper.valueForSearch(values.get(i)));
+values.set(i, fieldMapper.fieldType().valueForSearch(values.get(i)));
 }
 value = values;
 }
@@ -194,7 +194,7 @@ public class DocumentMapper implements ToXContent {
 meta);
 this.documentParser = new DocumentParser(index, indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock()));
 
-this.typeFilter = typeMapper().termQuery(type, null);
+this.typeFilter = typeMapper().fieldType().termQuery(type, null);
 this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
 this.mappingLock = mappingLock;
 
@@ -432,11 +432,7 @@ class DocumentParser implements Closeable {
 // we can only handle null values if we have mappings for them
 Mapper mapper = parentMapper.getMapper(lastFieldName);
 if (mapper != null) {
-if (mapper instanceof FieldMapper) {
-if (!((FieldMapper) mapper).supportsNullValue()) {
-throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]");
-}
-}
+// TODO: passing null to an object seems bogus?
 parseObjectOrField(context, mapper);
 } else if (parentMapper.dynamic() == ObjectMapper.Dynamic.STRICT) {
 throw new StrictDynamicMappingException(parentMapper.fullPath(), lastFieldName);
@@ -19,19 +19,9 @@
 
 package org.elasticsearch.index.mapper;
 
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.MultiTermQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.action.fieldstats.FieldStats;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
-import org.elasticsearch.index.query.QueryParseContext;
 
 import java.io.IOException;
-import java.util.List;
 
 /**
  *
@@ -47,57 +37,6 @@ public interface FieldMapper extends Mapper {
  */
 AbstractFieldMapper.CopyTo copyTo();
 
-/**
- * Returns the actual value of the field.
- */
-Object value(Object value);
-
-/**
- * Returns the value that will be used as a result for search. Can be only of specific types... .
- */
-Object valueForSearch(Object value);
-
-/**
- * Returns the indexed value used to construct search "values".
- */
-BytesRef indexedValueForSearch(Object value);
-
-/**
- * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
- * field in query string.
- */
-boolean useTermQueryWithQueryString();
-
-Query termQuery(Object value, @Nullable QueryParseContext context);
-
-Query termsQuery(List values, @Nullable QueryParseContext context);
-
-Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
-
-Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);
-
-Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context);
-
-Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context);
-
-/**
- * A term query to use when parsing a query string. Can return <tt>null</tt>.
- */
-@Nullable
-Query queryStringTermQuery(Term term);
-
-/**
- * Null value filter, returns <tt>null</tt> if there is no null value associated with the field.
- */
-@Nullable
-Query nullValueFilter();
-
-boolean isNumeric();
-
-boolean isSortable();
-
-boolean supportsNullValue();
-
 /**
  * Fields might not be available before indexing, for example _all, token_count,...
  * When get is called and these fields are requested, this case needs special treatment.
@@ -113,9 +52,4 @@ public interface FieldMapper extends Mapper {
  */
 Mapper parse(ParseContext context) throws IOException;
 
-/**
- * @return a {@link FieldStats} instance that maps to the type of this field based on the provided {@link Terms} instance.
- */
-FieldStats stats(Terms terms, int maxDoc) throws IOException;
-
 }
@@ -25,6 +25,7 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.queries.TermsQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PrefixQuery;
@@ -175,6 +176,8 @@ public class MappedFieldType extends FieldType {
 private SimilarityProvider similarity;
 private Loading normsLoading;
 private FieldDataType fieldDataType;
+private Object nullValue;
+private String nullValueAsString; // for sending null value to _all field
 
 protected MappedFieldType(MappedFieldType ref) {
 super(ref);
@@ -187,6 +190,8 @@ public class MappedFieldType extends FieldType {
 this.similarity = ref.similarity();
 this.normsLoading = ref.normsLoading();
 this.fieldDataType = ref.fieldDataType();
+this.nullValue = ref.nullValue();
+this.nullValueAsString = ref.nullValueAsString();
 }
 
 public MappedFieldType() {}
@@ -286,6 +291,23 @@ public class MappedFieldType extends FieldType {
 this.similarity = similarity;
 }
 
+/** Returns the value that should be added when JSON null is found, or null if no value should be added */
+public Object nullValue() {
+return nullValue;
+}
+
+/** Returns the null value stringified, so it can be used for e.g. _all field, or null if there is no null value */
+public String nullValueAsString() {
+return nullValueAsString;
+}
+
+/** Sets the null value and initializes the string version */
+public void setNullValue(Object nullValue) {
+checkIfFrozen();
+this.nullValue = nullValue;
+this.nullValueAsString = nullValue == null ? null : nullValue.toString();
+}
+
 /** Returns the actual value of the field. */
 public Object value(Object value) {
 return value;
@@ -353,6 +375,13 @@ public class MappedFieldType extends FieldType {
 return query;
 }
 
+public Query nullValueQuery() {
+if (nullValue == null) {
+return null;
+}
+return new ConstantScoreQuery(termQuery(nullValue, null));
+}
+
 /**
  * @return a {@link FieldStats} instance that maps to the type of this field based on the provided {@link Terms} instance.
  */
@@ -113,7 +113,7 @@ public class MapperService extends AbstractIndexComponent {
 
 private final List<DocumentTypeListener> typeListeners = new CopyOnWriteArrayList<>();
 
-private volatile ImmutableMap<String, FieldMapper> unmappedFieldMappers = ImmutableMap.of();
+private volatile ImmutableMap<String, MappedFieldType> unmappedFieldTypes = ImmutableMap.of();
 
 private volatile ImmutableSet<String> parentTypes = ImmutableSet.of();
 
@@ -474,31 +474,29 @@ public class MapperService extends AbstractIndexComponent {
 }
 
 /**
- * Returns an {@link FieldMapper} which has the given index name.
+ * Returns an {@link MappedFieldType} which has the given index name.
  *
 * If multiple types have fields with the same index name, the first is returned.
  */
-public FieldMapper indexName(String indexName) {
+public MappedFieldType indexName(String indexName) {
 FieldMappers mappers = fieldMappers.indexName(indexName);
 if (mappers == null) {
 return null;
 }
-return mappers.mapper();
+return mappers.mapper().fieldType();
 }
 
 /**
- * Returns the {@link FieldMappers} of all the {@link FieldMapper}s that are
- * registered under the give fullName across all the different {@link DocumentMapper} types.
+ * Returns the {@link MappedFieldType} for the give fullName.
  *
- * @param fullName The full name
- * @return All teh {@link FieldMappers} across all the {@link DocumentMapper}s for the given fullName.
+ * If multiple types have fields with the same full name, the first is returned.
  */
-public FieldMapper fullName(String fullName) {
+public MappedFieldType fullName(String fullName) {
 FieldMappers mappers = fieldMappers.fullName(fullName);
 if (mappers == null) {
 return null;
 }
-return mappers.mapper();
+return mappers.mapper().fieldType();
 }
 
 /**
@@ -563,17 +561,17 @@ public class MapperService extends AbstractIndexComponent {
 return null;
 }
 
-public FieldMapper smartNameFieldMapper(String smartName) {
-FieldMapper mapper = fullName(smartName);
-if (mapper != null) {
-return mapper;
+public MappedFieldType smartNameFieldType(String smartName) {
+MappedFieldType fieldType = fullName(smartName);
+if (fieldType != null) {
+return fieldType;
 }
 return indexName(smartName);
 }
 
-public FieldMapper smartNameFieldMapper(String smartName, @Nullable String[] types) {
+public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) {
 if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
-return smartNameFieldMapper(smartName);
+return smartNameFieldType(smartName);
 }
 for (String type : types) {
 DocumentMapper documentMapper = mappers.get(type);
@@ -582,7 +580,7 @@ public class MapperService extends AbstractIndexComponent {
 // see if we find a field for it
 FieldMappers mappers = documentMapper.mappers().smartName(smartName);
 if (mappers != null) {
-return mappers.mapper();
+return mappers.mapper().fieldType();
 }
 }
 }
@@ -592,10 +590,10 @@ public class MapperService extends AbstractIndexComponent {
 /**
  * Given a type (eg. long, string, ...), return an anonymous field mapper that can be used for search operations.
  */
-public FieldMapper unmappedFieldMapper(String type) {
-final ImmutableMap<String, FieldMapper> unmappedFieldMappers = this.unmappedFieldMappers;
-FieldMapper mapper = unmappedFieldMappers.get(type);
-if (mapper == null) {
+public MappedFieldType unmappedFieldType(String type) {
+final ImmutableMap<String, MappedFieldType> unmappedFieldMappers = this.unmappedFieldTypes;
+MappedFieldType fieldType = unmappedFieldMappers.get(type);
+if (fieldType == null) {
 final Mapper.TypeParser.ParserContext parserContext = documentMapperParser().parserContext();
 Mapper.TypeParser typeParser = parserContext.typeParser(type);
 if (typeParser == null) {
@@ -603,16 +601,16 @@ public class MapperService extends AbstractIndexComponent {
 }
 final Mapper.Builder<?, ?> builder = typeParser.parse("__anonymous_" + type, ImmutableMap.<String, Object>of(), parserContext);
 final BuilderContext builderContext = new BuilderContext(indexSettings, new ContentPath(1));
-mapper = (FieldMapper) builder.build(builderContext);
+fieldType = ((FieldMapper)builder.build(builderContext)).fieldType();
 
 // There is no need to synchronize writes here. In the case of concurrent access, we could just
 // compute some mappers several times, which is not a big deal
-this.unmappedFieldMappers = ImmutableMap.<String, FieldMapper>builder()
+this.unmappedFieldTypes = ImmutableMap.<String, MappedFieldType>builder()
 .putAll(unmappedFieldMappers)
-.put(type, mapper)
+.put(type, fieldType)
 .build();
 }
-return mapper;
+return fieldType;
 }
 
 public Analyzer searchAnalyzer() {
@@ -702,9 +700,9 @@ public class MapperService extends AbstractIndexComponent {
 
 @Override
 protected Analyzer getWrappedAnalyzer(String fieldName) {
-FieldMapper mapper = smartNameFieldMapper(fieldName);
-if (mapper != null && mapper.fieldType().searchAnalyzer() != null) {
-return mapper.fieldType().searchAnalyzer();
+MappedFieldType fieldType = smartNameFieldType(fieldName);
+if (fieldType != null && fieldType.searchAnalyzer() != null) {
+return fieldType.searchAnalyzer();
 }
 return defaultAnalyzer;
 }
@@ -721,9 +719,9 @@ public class MapperService extends AbstractIndexComponent {
 
 @Override
 protected Analyzer getWrappedAnalyzer(String fieldName) {
-FieldMapper mapper = smartNameFieldMapper(fieldName);
-if (mapper != null && mapper.fieldType().searchQuoteAnalyzer() != null) {
-return mapper.fieldType().searchQuoteAnalyzer();
+MappedFieldType fieldType = smartNameFieldType(fieldName);
+if (fieldType != null && fieldType.searchQuoteAnalyzer() != null) {
+return fieldType.searchQuoteAnalyzer();
 }
 return defaultAnalyzer;
 }
@ -28,18 +28,11 @@ import com.google.common.collect.Iterators;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.fieldstats.FieldStats;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
@ -52,7 +45,6 @@ import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.similarity.SimilarityLookupService;
|
||||
import org.elasticsearch.index.similarity.SimilarityProvider;
|
||||
|
||||
@ -228,6 +220,11 @@ public abstract class AbstractFieldMapper implements FieldMapper {
return builder;
}

public Builder nullValue(Object nullValue) {
this.fieldType.setNullValue(nullValue);
return this;
}

public T multiFieldPathType(ContentPath.Type pathType) {
multiFieldsBuilder.pathType(pathType);
return builder;
@ -383,67 +380,6 @@ public abstract class AbstractFieldMapper implements FieldMapper {
return multiFields.iterator();
}

@Override
public final Object value(Object value) {
return fieldType().value(value);
}

@Override
public final Object valueForSearch(Object value) {
return fieldType().valueForSearch(value);
}

// TODO: this is not final so ParentFieldMapper can have custom behavior, per type...
@Override
public BytesRef indexedValueForSearch(Object value) {
return fieldType().indexedValueForSearch(value);
}

@Override
public final Query queryStringTermQuery(Term term) {
return fieldType().queryStringTermQuery(term);
}

@Override
public final boolean useTermQueryWithQueryString() {
return fieldType().useTermQueryWithQueryString();
}

@Override
public final Query termQuery(Object value, @Nullable QueryParseContext context) {
return fieldType().termQuery(value, context);
}

@Override
public final Query termsQuery(List values, @Nullable QueryParseContext context) {
return fieldType().termsQuery(values, context);
}

@Override
public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
return fieldType().rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context);
}

@Override
public final Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
return fieldType().fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}

@Override
public final Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
return fieldType().prefixQuery(value, method, context);
}

@Override
public final Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
return fieldType().regexpQuery(value, flags, maxDeterminizedStates, method, context);
}

@Override
public Query nullValueFilter() {
return null;
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
@ -679,21 +615,6 @@ public abstract class AbstractFieldMapper implements FieldMapper {
multiFields.close();
}

@Override
public final boolean isNumeric() {
return fieldType().isNumeric();
}

@Override
public final boolean isSortable() {
return fieldType().isSortable();
}

@Override
public boolean supportsNullValue() {
return true;
}

public static class MultiFields {

public static MultiFields empty() {
@ -903,9 +824,4 @@ public abstract class AbstractFieldMapper implements FieldMapper {
public boolean isGenerated() {
return false;
}

@Override
public final FieldStats stats(Terms terms, int maxDoc) throws IOException {
return fieldType().stats(terms, maxDoc);
}
}
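Read together, the AbstractFieldMapper hunks above replace per-mapper implementations of value conversion and query construction with straight delegation to the associated MappedFieldType, and move null_value onto that field type. A minimal sketch of the resulting shape, using simplified stand-in classes rather than the real Elasticsearch/Lucene types, might look like this:

// Illustrative stand-ins only; the real classes carry many more responsibilities.
abstract class SketchMappedFieldType {
    private Object nullValue;                    // null_value now lives on the field type
    void setNullValue(Object v) { this.nullValue = v; }
    Object nullValue() { return nullValue; }
    abstract Object termQuery(Object value);     // the real method returns a Lucene Query
}

abstract class SketchFieldMapper {
    protected SketchMappedFieldType fieldType;
    SketchMappedFieldType fieldType() { return fieldType; }
    // The mapper no longer builds queries itself; it forwards to its field type.
    final Object termQuery(Object value) {
        return fieldType().termQuery(value);
    }
}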
@ -22,8 +22,6 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
@ -39,7 +37,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.Iterator;
@ -68,8 +65,6 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.freeze();
}

public static final Boolean NULL_VALUE = null;
}

public static class Values {
@ -79,18 +74,11 @@ public class BooleanFieldMapper extends AbstractFieldMapper {

public static class Builder extends AbstractFieldMapper.Builder<Builder, BooleanFieldMapper> {

private Boolean nullValue = Defaults.NULL_VALUE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE);
this.builder = this;
}

public Builder nullValue(boolean nullValue) {
this.nullValue = nullValue;
return this;
}

@Override
public Builder tokenized(boolean tokenized) {
if (tokenized) {
@ -102,7 +90,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
@Override
public BooleanFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new BooleanFieldMapper(fieldType, docValues, nullValue,
return new BooleanFieldMapper(fieldType, docValues,
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
@ -128,7 +116,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
}
}

static final class BooleanFieldType extends MappedFieldType {
public static final class BooleanFieldType extends MappedFieldType {

public BooleanFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
@ -143,6 +131,11 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
return new BooleanFieldType(this);
}

@Override
public Boolean nullValue() {
return (Boolean)super.nullValue();
}

@Override
public BytesRef indexedValueForSearch(Object value) {
if (value == null) {
@ -198,12 +191,14 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
}
}

private Boolean nullValue;

protected BooleanFieldMapper(MappedFieldType fieldType, Boolean docValues, Boolean nullValue,
protected BooleanFieldMapper(MappedFieldType fieldType, Boolean docValues,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
}

@Override
public BooleanFieldType fieldType() {
return (BooleanFieldType)fieldType;
}

@Override
@ -217,14 +212,6 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
return new FieldDataType(CONTENT_TYPE);
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
@ -235,8 +222,8 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
if (value == null) {
XContentParser.Token token = context.parser().currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
if (nullValue != null) {
value = nullValue;
if (fieldType().nullValue() != null) {
value = fieldType().nullValue();
}
} else {
value = context.parser().booleanValue();
@ -260,7 +247,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
}

if (!mergeResult.simulate()) {
this.nullValue = ((BooleanFieldMapper) mergeWith).nullValue;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((BooleanFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}

@ -272,8 +261,8 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
}
}
@ -70,28 +70,19 @@ public class ByteFieldMapper extends NumberFieldMapper {
static {
FIELD_TYPE.freeze();
}

public static final Byte NULL_VALUE = null;
}

public static class Builder extends NumberFieldMapper.Builder<Builder, ByteFieldMapper> {

protected Byte nullValue = Defaults.NULL_VALUE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT);
builder = this;
}

public Builder nullValue(byte nullValue) {
this.nullValue = nullValue;
return this;
}

@Override
public ByteFieldMapper build(BuilderContext context) {
setupFieldType(context);
ByteFieldMapper fieldMapper = new ByteFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context),
ByteFieldMapper fieldMapper = new ByteFieldMapper(fieldType, docValues, ignoreMalformed(context),
coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -142,6 +133,11 @@ public class ByteFieldMapper extends NumberFieldMapper {
return new ByteFieldType(this);
}

@Override
public Byte nullValue() {
return (Byte)super.nullValue();
}

@Override
public Byte value(Object value) {
if (value == null) {
@ -191,16 +187,15 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
}

private Byte nullValue;

private String nullValueAsString;

protected ByteFieldMapper(MappedFieldType fieldType, Boolean docValues,
Byte nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}

@Override
public ByteFieldType fieldType() {
return (ByteFieldType)fieldType;
}

@Override
@ -223,14 +218,6 @@ public class ByteFieldMapper extends NumberFieldMapper {
return Byte.parseByte(value.toString());
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected boolean customBoost() {
return true;
@ -243,17 +230,17 @@ public class ByteFieldMapper extends NumberFieldMapper {
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else if (externalValue instanceof String) {
String sExternalValue = (String) externalValue;
if (sExternalValue.length() == 0) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else {
value = Byte.parseByte(sExternalValue);
}
@ -267,17 +254,17 @@ public class ByteFieldMapper extends NumberFieldMapper {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
String currentFieldName = null;
Byte objValue = nullValue;
Byte objValue = fieldType().nullValue();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -327,8 +314,9 @@ public class ByteFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.nullValue = ((ByteFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((ByteFieldMapper) mergeWith).nullValueAsString;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((ByteFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}

@ -339,8 +327,8 @@ public class ByteFieldMapper extends NumberFieldMapper {
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat;
import org.elasticsearch.search.suggest.completion.CompletionTokenStream;
@ -72,7 +71,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
public static final String CONTENT_TYPE = "completion";

public static class Defaults extends AbstractFieldMapper.Defaults {
public static final MappedFieldType FIELD_TYPE = new CompletionFieldType();
public static final CompletionFieldType FIELD_TYPE = new CompletionFieldType();

static {
FIELD_TYPE.setOmitNorms(true);
@ -149,8 +148,10 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
@Override
public CompletionFieldMapper build(Mapper.BuilderContext context) {
setupFieldType(context);
return new CompletionFieldMapper(fieldType, null, payloads,
preserveSeparators, preservePositionIncrements, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, this.contextMapping);
CompletionFieldType completionFieldType = (CompletionFieldType)fieldType;
completionFieldType.setProvider(new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads));
completionFieldType.setContextMapping(contextMapping);
return new CompletionFieldMapper(fieldType, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}

}
@ -220,7 +221,10 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}

static final class CompletionFieldType extends MappedFieldType {
public static final class CompletionFieldType extends MappedFieldType {
private PostingsFormat postingsFormat;
private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;

public CompletionFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
@ -228,13 +232,46 @@ public class CompletionFieldMapper extends AbstractFieldMapper {

protected CompletionFieldType(CompletionFieldType ref) {
super(ref);
this.postingsFormat = ref.postingsFormat;
this.analyzingSuggestLookupProvider = ref.analyzingSuggestLookupProvider;
this.contextMapping = ref.contextMapping;
}

@Override
public MappedFieldType clone() {
public CompletionFieldType clone() {
return new CompletionFieldType(this);
}

public void setProvider(AnalyzingCompletionLookupProvider provider) {
checkIfFrozen();
this.analyzingSuggestLookupProvider = provider;
}

public synchronized PostingsFormat postingsFormat(PostingsFormat in) {
if (in instanceof Completion090PostingsFormat) {
throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class);
}
if (postingsFormat == null) {
postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider);
}
return postingsFormat;
}

public void setContextMapping(SortedMap<String, ContextMapping> contextMapping) {
checkIfFrozen();
this.contextMapping = contextMapping;
}

/** Get the context mapping associated with this completion field */
public SortedMap<String, ContextMapping> getContextMapping() {
return contextMapping;
}

/** @return true if a context mapping has been defined */
public boolean requiresContext() {
return contextMapping.isEmpty() == false;
}

@Override
public String value(Object value) {
if (value == null) {
@ -251,52 +288,25 @@ public class CompletionFieldMapper extends AbstractFieldMapper {

private static final BytesRef EMPTY = new BytesRef();

private PostingsFormat postingsFormat;
private final AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
private final boolean payloads;
private final boolean preservePositionIncrements;
private final boolean preserveSeparators;
private int maxInputLength;
private final SortedMap<String, ContextMapping> contextMapping;

/**
*
* @param contextMappings Configuration of context type. If none should be used set {@link ContextMapping.EMPTY_MAPPING}
* @param wrappedPostingsFormat the postings format to wrap, or {@code null} to wrap the codec's default postings format
*/
// Custom postings formats are deprecated but we still accept a postings format here to be able to test backward compatibility
// with older postings formats such as Elasticsearch090
public CompletionFieldMapper(MappedFieldType fieldType, PostingsFormat wrappedPostingsFormat, boolean payloads,
boolean preserveSeparators, boolean preservePositionIncrements, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, SortedMap<String, ContextMapping> contextMappings) {
public CompletionFieldMapper(MappedFieldType fieldType, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, false, null, indexSettings, multiFields, copyTo);
analyzingSuggestLookupProvider = new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads);
if (wrappedPostingsFormat == null) {
// delayed until postingsFormat() is called
this.postingsFormat = null;
} else {
this.postingsFormat = new Completion090PostingsFormat(wrappedPostingsFormat, analyzingSuggestLookupProvider);
}
this.preserveSeparators = preserveSeparators;
this.payloads = payloads;
this.preservePositionIncrements = preservePositionIncrements;
this.maxInputLength = maxInputLength;
this.contextMapping = contextMappings;
}

public synchronized PostingsFormat postingsFormat(PostingsFormat in) {
if (in instanceof Completion090PostingsFormat) {
throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class);
}
if (postingsFormat == null) {
postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider);
}
return postingsFormat;
@Override
public CompletionFieldType fieldType() {
return (CompletionFieldType)fieldType;
}

@Override
public Mapper parse(ParseContext context) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
throw new MapperParsingException("completion field [" + fieldType().names().fullName() + "] does not support null values");
}

String surfaceForm = null;
BytesRef payload = null;
@ -322,7 +332,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if (token == Token.START_OBJECT) {
while ((token = parser.nextToken()) != Token.END_OBJECT) {
String name = parser.text();
ContextMapping mapping = contextMapping.get(name);
ContextMapping mapping = fieldType().getContextMapping().get(name);
if (mapping == null) {
throw new ElasticsearchParseException("context [" + name + "] is not defined");
} else {
@ -331,7 +341,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}
contextConfig = Maps.newTreeMap();
for (ContextMapping mapping : contextMapping.values()) {
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
ContextConfig config = configs.get(mapping.name());
contextConfig.put(mapping.name(), config==null ? mapping.defaultConfig() : config);
}
@ -389,7 +399,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {

if(contextConfig == null) {
contextConfig = Maps.newTreeMap();
for (ContextMapping mapping : contextMapping.values()) {
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
contextConfig.put(mapping.name(), mapping.defaultConfig());
}
}
@ -402,13 +412,13 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if (input.length() == 0) {
continue;
}
BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
input), weight, payload);
BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef(
input), weight, payload);
context.doc().add(getCompletionField(ctx, input, suggestPayload));
}
} else {
BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
surfaceForm), weight, payload);
BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef(
surfaceForm), weight, payload);
for (String input : inputs) {
if (input.length() == 0) {
continue;
@ -425,22 +435,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}

/**
* Get the context mapping associated with this completion field.
*/
public SortedMap<String, ContextMapping> getContextMapping() {
return contextMapping;
}

/** @return true if a context mapping has been defined */
public boolean requiresContext() {
return !contextMapping.isEmpty();
}

public Field getCompletionField(String input, BytesRef payload) {
return getCompletionField(ContextMapping.EMPTY_CONTEXT, input, payload);
}

public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) {
final String originalInput = input;
if (input.length() > maxInputLength) {
@ -454,7 +448,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
+ "] at position " + i + " is a reserved character");
}
}
return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider);
return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, fieldType().analyzingSuggestLookupProvider);
}

public static int correctSubStringLen(String input, int len) {
@ -466,8 +460,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}

public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException {
return analyzingSuggestLookupProvider.buildPayload(
surfaceForm, weight, payload);
return fieldType().analyzingSuggestLookupProvider.buildPayload(surfaceForm, weight, payload);
}

private static final class SuggestField extends Field {
@ -498,15 +491,15 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if (fieldType.indexAnalyzer().name().equals(fieldType.searchAnalyzer().name()) == false) {
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType.searchAnalyzer().name());
}
builder.field(Fields.PAYLOADS, this.payloads);
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators);
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), this.preservePositionIncrements);
builder.field(Fields.PAYLOADS, fieldType().analyzingSuggestLookupProvider.hasPayloads());
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreserveSep());
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements());
builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength);
multiFields.toXContent(builder, params);

if(!contextMapping.isEmpty()) {
if(fieldType().requiresContext()) {
builder.startObject(Fields.CONTEXT);
for (ContextMapping mapping : contextMapping.values()) {
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
builder.value(mapping);
}
builder.endObject();
@ -524,11 +517,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
return CONTENT_TYPE;
}

@Override
public boolean supportsNullValue() {
return false;
}

@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
@ -540,23 +528,23 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}

public boolean isStoringPayloads() {
return payloads;
return fieldType().analyzingSuggestLookupProvider.hasPayloads();
}

@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
if (payloads != fieldMergeWith.payloads) {
if (fieldType().analyzingSuggestLookupProvider.hasPayloads() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.hasPayloads()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different payload values");
}
if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) {
if (fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_position_increments' values");
}
if (preserveSeparators != fieldMergeWith.preserveSeparators) {
if (fieldType().analyzingSuggestLookupProvider.getPreserveSep() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreserveSep()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_separators' values");
}
if(!ContextMapping.mappingsAreEqual(getContextMapping(), fieldMergeWith.getContextMapping())) {
if(!ContextMapping.mappingsAreEqual(fieldType().getContextMapping(), fieldMergeWith.fieldType().getContextMapping())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'context_mapping' values");
}
if (!mergeResult.simulate()) {
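The CompletionFieldMapper hunks apply the same idea to the suggester state: the lookup provider and context mapping move onto CompletionFieldType, which accepts mutation only until it is frozen. A rough stand-in sketch of that set-before-freeze contract (simplified, not the actual classes):

// Illustrative stand-in for the freeze contract used by the field type above.
class SketchCompletionFieldType {
    private boolean frozen;
    private Object lookupProvider;               // stands in for AnalyzingCompletionLookupProvider
    void freeze() { frozen = true; }
    private void checkIfFrozen() {
        if (frozen) throw new IllegalStateException("field type is already frozen");
    }
    void setProvider(Object provider) { checkIfFrozen(); this.lookupProvider = provider; }
    Object provider() { return lookupProvider; }
}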
@ -115,8 +115,9 @@ public class DateFieldMapper extends NumberFieldMapper {
@Override
public DateFieldMapper build(BuilderContext context) {
setupFieldType(context);
fieldType.setNullValue(nullValue);
DateFieldMapper fieldMapper = new DateFieldMapper(fieldType,
docValues, nullValue, ignoreMalformed(context), coerce(context),
docValues, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -374,12 +375,9 @@ public class DateFieldMapper extends NumberFieldMapper {
}
}

private String nullValue;

protected DateFieldMapper(MappedFieldType fieldType, Boolean docValues, String nullValue, Explicit<Boolean> ignoreMalformed,Explicit<Boolean> coerce,
protected DateFieldMapper(MappedFieldType fieldType, Boolean docValues, Explicit<Boolean> ignoreMalformed,Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
}

@Override
@ -409,15 +407,6 @@ public class DateFieldMapper extends NumberFieldMapper {
};
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected boolean customBoost() {
return true;
@ -431,13 +420,13 @@ public class DateFieldMapper extends NumberFieldMapper {
Object externalValue = context.externalValue();
dateAsString = (String) externalValue;
if (dateAsString == null) {
dateAsString = nullValue;
dateAsString = fieldType.nullValueAsString();
}
} else {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
dateAsString = nullValue;
dateAsString = fieldType.nullValueAsString();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
dateAsString = parser.text();
} else if (token == XContentParser.Token.START_OBJECT) {
@ -448,7 +437,7 @@ public class DateFieldMapper extends NumberFieldMapper {
} else {
if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
if (token == XContentParser.Token.VALUE_NULL) {
dateAsString = nullValue;
dateAsString = fieldType.nullValueAsString();
} else {
dateAsString = parser.text();
}
@ -496,9 +485,9 @@ public class DateFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.nullValue = ((DateFieldMapper) mergeWith).nullValue;
this.fieldType = this.fieldType.clone();
fieldType().setDateTimeFormatter(((DateFieldMapper) mergeWith).fieldType().dateTimeFormatter());
this.fieldType.setNullValue(((DateFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}
@ -511,8 +500,8 @@ public class DateFieldMapper extends NumberFieldMapper {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
builder.field("format", fieldType().dateTimeFormatter().format());
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType.nullValueAsString() != null) {
builder.field("null_value", fieldType.nullValueAsString());
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
@ -23,10 +23,8 @@ import com.carrotsearch.hppc.DoubleArrayList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
@ -76,28 +74,19 @@ public class DoubleFieldMapper extends NumberFieldMapper {
static {
FIELD_TYPE.freeze();
}

public static final Double NULL_VALUE = null;
}

public static class Builder extends NumberFieldMapper.Builder<Builder, DoubleFieldMapper> {

protected Double nullValue = Defaults.NULL_VALUE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
builder = this;
}

public Builder nullValue(double nullValue) {
this.nullValue = nullValue;
return this;
}

@Override
public DoubleFieldMapper build(BuilderContext context) {
setupFieldType(context);
DoubleFieldMapper fieldMapper = new DoubleFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
DoubleFieldMapper fieldMapper = new DoubleFieldMapper(fieldType, docValues, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -148,6 +137,11 @@ public class DoubleFieldMapper extends NumberFieldMapper {
return new DoubleFieldType(this);
}

@Override
public Double nullValue() {
return (Double)super.nullValue();
}

@Override
public Double value(Object value) {
if (value == null) {
@ -198,15 +192,14 @@ public class DoubleFieldMapper extends NumberFieldMapper {
}
}

private Double nullValue;

private String nullValueAsString;

protected DoubleFieldMapper(MappedFieldType fieldType, Boolean docValues, Double nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
protected DoubleFieldMapper(MappedFieldType fieldType, Boolean docValues, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}

@Override
public DoubleFieldType fieldType() {
return (DoubleFieldType)fieldType;
}

@Override
@ -219,18 +212,6 @@ public class DoubleFieldMapper extends NumberFieldMapper {
return new FieldDataType("double");
}

public Query rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newDoubleRange(fieldType.names().indexName(), fieldType.numericPrecisionStep(), lowerTerm, upperTerm, includeLower, includeUpper);
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected boolean customBoost() {
return true;
@ -243,17 +224,17 @@ public class DoubleFieldMapper extends NumberFieldMapper {
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else if (externalValue instanceof String) {
String sExternalValue = (String) externalValue;
if (sExternalValue.length() == 0) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else {
value = Double.parseDouble(sExternalValue);
}
@ -267,17 +248,17 @@ public class DoubleFieldMapper extends NumberFieldMapper {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
String currentFieldName = null;
Double objValue = nullValue;
Double objValue = fieldType().nullValue();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -338,8 +319,9 @@ public class DoubleFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.nullValue = ((DoubleFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((DoubleFieldMapper) mergeWith).nullValueAsString;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((DoubleFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}

@ -350,8 +332,8 @@ public class DoubleFieldMapper extends NumberFieldMapper {
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
@ -77,28 +77,19 @@ public class FloatFieldMapper extends NumberFieldMapper {
static {
FIELD_TYPE.freeze();
}

public static final Float NULL_VALUE = null;
}

public static class Builder extends NumberFieldMapper.Builder<Builder, FloatFieldMapper> {

protected Float nullValue = Defaults.NULL_VALUE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}

public Builder nullValue(float nullValue) {
this.nullValue = nullValue;
return this;
}

@Override
public FloatFieldMapper build(BuilderContext context) {
setupFieldType(context);
FloatFieldMapper fieldMapper = new FloatFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
FloatFieldMapper fieldMapper = new FloatFieldMapper(fieldType, docValues, ignoreMalformed(context), coerce(context),
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -149,6 +140,11 @@ public class FloatFieldMapper extends NumberFieldMapper {
return new FloatFieldType(this);
}

@Override
public Float nullValue() {
return (Float)super.nullValue();
}

@Override
public Float value(Object value) {
if (value == null) {
@ -199,16 +195,15 @@ public class FloatFieldMapper extends NumberFieldMapper {
}
}

private Float nullValue;

private String nullValueAsString;

protected FloatFieldMapper(MappedFieldType fieldType, Boolean docValues,
Float nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}

@Override
public FloatFieldType fieldType() {
return (FloatFieldType)fieldType;
}

@Override
@ -231,14 +226,6 @@ public class FloatFieldMapper extends NumberFieldMapper {
return Float.parseFloat(value.toString());
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected boolean customBoost() {
return true;
@ -251,17 +238,17 @@ public class FloatFieldMapper extends NumberFieldMapper {
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else if (externalValue instanceof String) {
String sExternalValue = (String) externalValue;
if (sExternalValue.length() == 0) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else {
value = Float.parseFloat(sExternalValue);
}
@ -275,17 +262,17 @@ public class FloatFieldMapper extends NumberFieldMapper {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
String currentFieldName = null;
Float objValue = nullValue;
Float objValue = fieldType().nullValue();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -346,8 +333,9 @@ public class FloatFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.nullValue = ((FloatFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((FloatFieldMapper) mergeWith).nullValueAsString;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((FloatFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}

@ -359,8 +347,8 @@ public class FloatFieldMapper extends NumberFieldMapper {
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
@ -42,6 +42,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -72,21 +73,17 @@ public class IntegerFieldMapper extends NumberFieldMapper {
static {
FIELD_TYPE.freeze();
}

public static final Integer NULL_VALUE = null;
}

public static class Builder extends NumberFieldMapper.Builder<Builder, IntegerFieldMapper> {

protected Integer nullValue = Defaults.NULL_VALUE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}

public Builder nullValue(int nullValue) {
this.nullValue = nullValue;
this.fieldType.setNullValue(nullValue);
return this;
}

@ -94,7 +91,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
public IntegerFieldMapper build(BuilderContext context) {
setupFieldType(context);
IntegerFieldMapper fieldMapper = new IntegerFieldMapper(fieldType, docValues,
nullValue, ignoreMalformed(context), coerce(context), fieldDataSettings,
ignoreMalformed(context), coerce(context), fieldDataSettings,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -132,7 +129,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
}

static final class IntegerFieldType extends NumberFieldType {
public static final class IntegerFieldType extends NumberFieldType {

public IntegerFieldType() {}

@ -145,6 +142,11 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return new IntegerFieldType(this);
}

@Override
public Integer nullValue() {
return (Integer)super.nullValue();
}

@Override
public Integer value(Object value) {
if (value == null) {
@ -194,17 +196,16 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
}

private Integer nullValue;

private String nullValueAsString;

protected IntegerFieldMapper(MappedFieldType fieldType, Boolean docValues,
Integer nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}

@Override
public IntegerFieldType fieldType() {
return (IntegerFieldType)fieldType;
}

@Override
@ -217,8 +218,6 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return new FieldDataType("int");
}

private static int parseValue(Object value) {
if (value instanceof Number) {
return ((Number) value).intValue();
@ -229,14 +228,6 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return Integer.parseInt(value.toString());
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected boolean customBoost() {
return true;
@ -249,17 +240,17 @@ public class IntegerFieldMapper extends NumberFieldMapper {
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
if (nullValue == null) {
if (fieldType.nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else if (externalValue instanceof String) {
String sExternalValue = (String) externalValue;
if (sExternalValue.length() == 0) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else {
value = Integer.parseInt(sExternalValue);
}
@ -273,17 +264,17 @@ public class IntegerFieldMapper extends NumberFieldMapper {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
if (nullValue == null) {
if (fieldType.nullValue() == null) {
return;
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
value = fieldType().nullValue();
if (fieldType.nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), fieldType.nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
String currentFieldName = null;
Integer objValue = nullValue;
Integer objValue = fieldType().nullValue();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -316,7 +307,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {

protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, (NumberFieldType)fieldType);
CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, fieldType);
field.setBoost(boost);
fields.add(field);
}
@ -325,10 +316,6 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
}

protected Integer nullValue() {
return nullValue;
}

@Override
protected String contentType() {
return CONTENT_TYPE;
@ -341,8 +328,9 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.nullValue = ((IntegerFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((IntegerFieldMapper) mergeWith).nullValueAsString;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((FieldMapper)mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}

@ -353,8 +341,8 @@ public class IntegerFieldMapper extends NumberFieldMapper {
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType.nullValue() != null) {
builder.field("null_value", fieldType.nullValue());
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
@ -72,28 +72,24 @@ public class LongFieldMapper extends NumberFieldMapper {
static {
FIELD_TYPE.freeze();
}

public static final Long NULL_VALUE = null;
}

public static class Builder extends NumberFieldMapper.Builder<Builder, LongFieldMapper> {

protected Long nullValue = Defaults.NULL_VALUE;

public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
builder = this;
}

public Builder nullValue(long nullValue) {
this.nullValue = nullValue;
this.fieldType.setNullValue(nullValue);
return this;
}

@Override
public LongFieldMapper build(BuilderContext context) {
setupFieldType(context);
LongFieldMapper fieldMapper = new LongFieldMapper(fieldType, docValues, nullValue,
LongFieldMapper fieldMapper = new LongFieldMapper(fieldType, docValues,
ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
@ -144,6 +140,11 @@ public class LongFieldMapper extends NumberFieldMapper {
return new LongFieldType(this);
}

@Override
public Long nullValue() {
return (Long)super.nullValue();
}

@Override
public Long value(Object value) {
if (value == null) {
@ -193,17 +194,16 @@ public class LongFieldMapper extends NumberFieldMapper {
}
}

private Long nullValue;

private String nullValueAsString;

protected LongFieldMapper(MappedFieldType fieldType, Boolean docValues,
Long nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}

@Override
public LongFieldType fieldType() {
return (LongFieldType)fieldType;
}

@Override
@ -216,14 +216,6 @@ public class LongFieldMapper extends NumberFieldMapper {
return new FieldDataType("long");
}

@Override
public Query nullValueFilter() {
if (nullValue == null) {
return null;
}
return new ConstantScoreQuery(termQuery(nullValue, null));
}

@Override
protected boolean customBoost() {
return true;
@ -236,17 +228,17 @@ public class LongFieldMapper extends NumberFieldMapper {
if (context.externalValueSet()) {
Object externalValue = context.externalValue();
if (externalValue == null) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else if (externalValue instanceof String) {
String sExternalValue = (String) externalValue;
if (sExternalValue.length() == 0) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
value = fieldType().nullValue();
} else {
value = Long.parseLong(sExternalValue);
}
@ -260,17 +252,17 @@ public class LongFieldMapper extends NumberFieldMapper {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
if (nullValue == null) {
if (fieldType().nullValue() == null) {
return;
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType.names().fullName(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
String currentFieldName = null;
Long objValue = nullValue;
Long objValue = fieldType().nullValue();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -320,8 +312,9 @@ public class LongFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.nullValue = ((LongFieldMapper) mergeWith).nullValue;
this.nullValueAsString = ((LongFieldMapper) mergeWith).nullValueAsString;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((LongFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
}
}

@ -332,8 +325,8 @@ public class LongFieldMapper extends NumberFieldMapper {
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
if (includeDefaults || fieldType().nullValue() != null) {
builder.field("null_value", fieldType().nullValue());
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
@ -47,6 +47,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
|
||||
public static final String CONTENT_TYPE = "murmur3";
|
||||
|
||||
public static class Defaults extends LongFieldMapper.Defaults {
|
||||
public static final MappedFieldType FIELD_TYPE = new Murmur3FieldType();
|
||||
}
|
||||
|
||||
public static class Builder extends NumberFieldMapper.Builder<Builder, Murmur3FieldMapper> {
|
||||
@ -60,7 +61,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
|
||||
@Override
|
||||
public Murmur3FieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(fieldType, docValues, null,
|
||||
Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(fieldType, docValues,
|
||||
ignoreMalformed(context), coerce(context),
|
||||
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
@ -104,11 +105,25 @@ public class Murmur3FieldMapper extends LongFieldMapper {
|
||||
}
|
||||
}
|
||||
|
||||
// this only exists so a check can be done to match the field type to using murmur3 hashing...
|
||||
public static class Murmur3FieldType extends LongFieldMapper.LongFieldType {
|
||||
public Murmur3FieldType() {}
|
||||
|
||||
protected Murmur3FieldType(Murmur3FieldType ref) {
|
||||
super(ref);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Murmur3FieldType clone() {
|
||||
return new Murmur3FieldType(this);
|
||||
}
|
||||
}
|
||||
|
||||
protected Murmur3FieldMapper(MappedFieldType fieldType, Boolean docValues,
|
||||
Long nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
@Nullable Settings fieldDataSettings,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(fieldType, docValues, nullValue, ignoreMalformed, coerce,
|
||||
super(fieldType, docValues, ignoreMalformed, coerce,
|
||||
fieldDataSettings, indexSettings, multiFields, copyTo);
|
||||
}
|
||||
|
||||
|
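
Note (editorial illustration, not part of the commit): the LongFieldMapper and Murmur3FieldMapper hunks above all apply one pattern — null_value and its string form move off the mapper onto the field type, and because field types are frozen once built, a merge updates them by cloning, mutating the copy, and freezing it again. A minimal, self-contained Java sketch of that idiom follows; FrozenType and MergeSketch are made-up stand-ins for MappedFieldType and the mapper, not Elasticsearch classes.

// Illustration of the clone-mutate-freeze idiom used in the merge hunks above.
class FrozenType {
    private boolean frozen;
    private Long nullValue;

    void freeze() {
        this.frozen = true;
    }

    void setNullValue(Long nullValue) {
        if (frozen) {
            throw new IllegalStateException("can't modify a frozen field type");
        }
        this.nullValue = nullValue;
    }

    Long nullValue() {
        return nullValue;
    }

    FrozenType copy() {
        FrozenType copy = new FrozenType();
        copy.nullValue = this.nullValue; // the copy starts unfrozen so a merge can adjust it
        return copy;
    }
}

class MergeSketch {
    private FrozenType fieldType = new FrozenType();

    // Mirrors the merge hunks above: never mutate the live, frozen instance in place.
    void mergeNullValue(FrozenType mergeWith) {
        FrozenType updated = fieldType.copy();
        updated.setNullValue(mergeWith.nullValue());
        updated.freeze();
        this.fieldType = updated;
    }
}
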
@ -73,28 +73,19 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
static {
|
||||
FIELD_TYPE.freeze();
|
||||
}
|
||||
|
||||
public static final Short NULL_VALUE = null;
|
||||
}
|
||||
|
||||
public static class Builder extends NumberFieldMapper.Builder<Builder, ShortFieldMapper> {
|
||||
|
||||
protected Short nullValue = Defaults.NULL_VALUE;
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP);
|
||||
builder = this;
|
||||
}
|
||||
|
||||
public Builder nullValue(short nullValue) {
|
||||
this.nullValue = nullValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ShortFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
ShortFieldMapper fieldMapper = new ShortFieldMapper(fieldType, docValues, nullValue,
|
||||
ShortFieldMapper fieldMapper = new ShortFieldMapper(fieldType, docValues,
|
||||
ignoreMalformed(context), coerce(context), fieldDataSettings,
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
@ -147,6 +138,11 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
return new ShortFieldType(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Short nullValue() {
|
||||
return (Short)super.nullValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Short value(Object value) {
|
||||
if (value == null) {
|
||||
@ -196,18 +192,17 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
}
|
||||
}
|
||||
|
||||
private Short nullValue;
|
||||
|
||||
private String nullValueAsString;
|
||||
|
||||
protected ShortFieldMapper(MappedFieldType fieldType, Boolean docValues,
|
||||
Short nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
@Nullable Settings fieldDataSettings,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(fieldType, docValues, ignoreMalformed, coerce,
|
||||
fieldDataSettings, indexSettings, multiFields, copyTo);
|
||||
this.nullValue = nullValue;
|
||||
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ShortFieldType fieldType() {
|
||||
return (ShortFieldType)fieldType;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -230,14 +225,6 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
return Short.parseShort(value.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query nullValueFilter() {
|
||||
if (nullValue == null) {
|
||||
return null;
|
||||
}
|
||||
return new ConstantScoreQuery(termQuery(nullValue, null));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean customBoost() {
|
||||
return true;
|
||||
@ -250,17 +237,17 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
if (context.externalValueSet()) {
|
||||
Object externalValue = context.externalValue();
|
||||
if (externalValue == null) {
|
||||
if (nullValue == null) {
|
||||
if (fieldType().nullValue() == null) {
|
||||
return;
|
||||
}
|
||||
value = nullValue;
|
||||
value = fieldType().nullValue();
|
||||
} else if (externalValue instanceof String) {
|
||||
String sExternalValue = (String) externalValue;
|
||||
if (sExternalValue.length() == 0) {
|
||||
if (nullValue == null) {
|
||||
if (fieldType().nullValue() == null) {
|
||||
return;
|
||||
}
|
||||
value = nullValue;
|
||||
value = fieldType().nullValue();
|
||||
} else {
|
||||
value = Short.parseShort(sExternalValue);
|
||||
}
|
||||
@ -274,17 +261,17 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
XContentParser parser = context.parser();
|
||||
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
|
||||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
|
||||
if (nullValue == null) {
|
||||
if (fieldType().nullValue() == null) {
|
||||
return;
|
||||
}
|
||||
value = nullValue;
|
||||
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
|
||||
context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost);
|
||||
value = fieldType().nullValue();
|
||||
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
|
||||
context.allEntries().addText(fieldType.names().fullName(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Short objValue = nullValue;
|
||||
Short objValue = fieldType().nullValue();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
@ -334,8 +321,9 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
return;
|
||||
}
|
||||
if (!mergeResult.simulate()) {
|
||||
this.nullValue = ((ShortFieldMapper) mergeWith).nullValue;
|
||||
this.nullValueAsString = ((ShortFieldMapper) mergeWith).nullValueAsString;
|
||||
this.fieldType = this.fieldType.clone();
|
||||
this.fieldType.setNullValue(((ShortFieldMapper) mergeWith).fieldType().nullValue());
|
||||
this.fieldType.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
@ -346,8 +334,8 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
||||
if (includeDefaults || fieldType.numericPrecisionStep() != DEFAULT_PRECISION_STEP) {
|
||||
builder.field("precision_step", fieldType.numericPrecisionStep());
|
||||
}
|
||||
if (includeDefaults || nullValue != null) {
|
||||
builder.field("null_value", nullValue);
|
||||
if (includeDefaults || fieldType().nullValue() != null) {
|
||||
builder.field("null_value", fieldType().nullValue());
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("include_in_all", includeInAll);
|
||||
|
@ -83,11 +83,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
builder = this;
|
||||
}
|
||||
|
||||
public Builder nullValue(String nullValue) {
|
||||
this.nullValue = nullValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
|
||||
super.searchAnalyzer(searchAnalyzer);
|
||||
@ -135,7 +130,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
defaultFieldType.freeze();
|
||||
setupFieldType(context);
|
||||
StringFieldMapper fieldMapper = new StringFieldMapper(
|
||||
fieldType, defaultFieldType, docValues, nullValue, positionOffsetGap, ignoreAbove,
|
||||
fieldType, defaultFieldType, docValues, positionOffsetGap, ignoreAbove,
|
||||
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
@ -189,7 +184,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
}
|
||||
}
|
||||
|
||||
static final class StringFieldType extends MappedFieldType {
|
||||
public static final class StringFieldType extends MappedFieldType {
|
||||
|
||||
public StringFieldType() {
|
||||
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
|
||||
@ -210,23 +205,29 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
}
|
||||
return value.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query nullValueQuery() {
|
||||
if (nullValue() == null) {
|
||||
return null;
|
||||
}
|
||||
return termQuery(nullValue(), null);
|
||||
}
|
||||
}
|
||||
|
||||
private String nullValue;
|
||||
private Boolean includeInAll;
|
||||
private int positionOffsetGap;
|
||||
private int ignoreAbove;
|
||||
private final MappedFieldType defaultFieldType;
|
||||
|
||||
protected StringFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, Boolean docValues,
|
||||
String nullValue, int positionOffsetGap, int ignoreAbove, @Nullable Settings fieldDataSettings,
|
||||
int positionOffsetGap, int ignoreAbove, @Nullable Settings fieldDataSettings,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
|
||||
if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
|
||||
throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values");
|
||||
}
|
||||
this.defaultFieldType = defaultFieldType;
|
||||
this.nullValue = nullValue;
|
||||
this.positionOffsetGap = positionOffsetGap;
|
||||
this.ignoreAbove = ignoreAbove;
|
||||
}
|
||||
@ -273,17 +274,9 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
return ignoreAbove;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query nullValueFilter() {
|
||||
if (nullValue == null) {
|
||||
return null;
|
||||
}
|
||||
return termQuery(nullValue, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, fieldType.boost());
|
||||
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType.boost());
|
||||
if (valueAndBoost.value() == null) {
|
||||
return;
|
||||
}
|
||||
@ -359,8 +352,10 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
}
|
||||
if (!mergeResult.simulate()) {
|
||||
this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll;
|
||||
this.nullValue = ((StringFieldMapper) mergeWith).nullValue;
|
||||
this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove;
|
||||
this.fieldType = this.fieldType.clone();
|
||||
this.fieldType.setNullValue(((StringFieldMapper) mergeWith).fieldType().nullValue());
|
||||
this.fieldType.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
@ -368,8 +363,8 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
|
||||
if (includeDefaults || nullValue != null) {
|
||||
builder.field("null_value", nullValue);
|
||||
if (includeDefaults || fieldType().nullValue() != null) {
|
||||
builder.field("null_value", fieldType().nullValue());
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("include_in_all", includeInAll);
|
||||
|
@ -60,7 +60,6 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
|
||||
}
|
||||
|
||||
public static class Builder extends NumberFieldMapper.Builder<Builder, TokenCountFieldMapper> {
|
||||
private Integer nullValue = Defaults.NULL_VALUE;
|
||||
private NamedAnalyzer analyzer;
|
||||
|
||||
public Builder(String name) {
|
||||
@ -68,11 +67,6 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
|
||||
builder = this;
|
||||
}
|
||||
|
||||
public Builder nullValue(int nullValue) {
|
||||
this.nullValue = nullValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder analyzer(NamedAnalyzer analyzer) {
|
||||
this.analyzer = analyzer;
|
||||
return this;
|
||||
@ -85,7 +79,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
|
||||
@Override
|
||||
public TokenCountFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(fieldType, docValues, nullValue,
|
||||
TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(fieldType, docValues,
|
||||
ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(),
|
||||
analyzer, multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
@ -134,26 +128,24 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
|
||||
|
||||
private NamedAnalyzer analyzer;
|
||||
|
||||
protected TokenCountFieldMapper(MappedFieldType fieldType, Boolean docValues, Integer nullValue,
|
||||
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer,
|
||||
MultiFields multiFields, CopyTo copyTo) {
|
||||
super(fieldType, docValues, nullValue, ignoreMalformed, coerce,
|
||||
fieldDataSettings, indexSettings, multiFields, copyTo);
|
||||
|
||||
protected TokenCountFieldMapper(MappedFieldType fieldType, Boolean docValues, Explicit<Boolean> ignoreMalformed,
|
||||
Explicit<Boolean> coerce, Settings fieldDataSettings, Settings indexSettings,
|
||||
NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
|
||||
this.analyzer = analyzer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, fieldType.boost());
|
||||
if (valueAndBoost.value() == null && nullValue() == null) {
|
||||
if (valueAndBoost.value() == null && fieldType().nullValue() == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (fieldType.indexOptions() != NONE || fieldType.stored() || fieldType().hasDocValues()) {
|
||||
int count;
|
||||
if (valueAndBoost.value() == null) {
|
||||
count = nullValue();
|
||||
count = fieldType().nullValue();
|
||||
} else {
|
||||
count = countPositions(analyzer.analyzer().tokenStream(fieldType().names().shortName(), valueAndBoost.value()));
|
||||
}
|
||||
|
@ -66,7 +66,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
public static class Defaults extends AbstractFieldMapper.Defaults {
|
||||
public static final String NAME = FieldNamesFieldMapper.NAME;
|
||||
|
||||
public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_ENABLED;
|
||||
public static final boolean ENABLED = true;
|
||||
public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType();
|
||||
|
||||
static {
|
||||
@ -82,7 +82,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
}
|
||||
|
||||
public static class Builder extends AbstractFieldMapper.Builder<Builder, FieldNamesFieldMapper> {
|
||||
private EnabledAttributeMapper enabledState = Defaults.ENABLED_STATE;
|
||||
private boolean enabled = Defaults.ENABLED;
|
||||
|
||||
public Builder() {
|
||||
super(Defaults.NAME, Defaults.FIELD_TYPE);
|
||||
@ -97,14 +97,16 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
}
|
||||
|
||||
public Builder enabled(boolean enabled) {
|
||||
this.enabledState = enabled ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
|
||||
this.enabled = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldNamesFieldMapper build(BuilderContext context) {
|
||||
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
|
||||
return new FieldNamesFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings());
|
||||
setupFieldType(context);
|
||||
FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldType)fieldType;
|
||||
fieldNamesFieldType.setEnabled(enabled);
|
||||
return new FieldNamesFieldMapper(fieldType, fieldDataSettings, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
||||
@ -133,7 +135,9 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
}
|
||||
}
|
||||
|
||||
static final class FieldNamesFieldType extends MappedFieldType {
|
||||
public static final class FieldNamesFieldType extends MappedFieldType {
|
||||
|
||||
private boolean enabled = Defaults.ENABLED;
|
||||
|
||||
public FieldNamesFieldType() {
|
||||
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
|
||||
@ -141,10 +145,20 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
|
||||
protected FieldNamesFieldType(FieldNamesFieldType ref) {
|
||||
super(ref);
|
||||
this.enabled = ref.enabled;
|
||||
}
|
||||
|
||||
public void setEnabled(boolean enabled) {
|
||||
checkIfFrozen();
|
||||
this.enabled = enabled;
|
||||
}
|
||||
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MappedFieldType clone() {
|
||||
public FieldNamesFieldType clone() {
|
||||
return new FieldNamesFieldType(this);
|
||||
}
|
||||
|
||||
@ -163,22 +177,26 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
}
|
||||
|
||||
private final MappedFieldType defaultFieldType;
|
||||
private EnabledAttributeMapper enabledState;
|
||||
private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled
|
||||
|
||||
public FieldNamesFieldMapper(Settings indexSettings) {
|
||||
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings);
|
||||
this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
|
||||
}
|
||||
|
||||
public FieldNamesFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
public FieldNamesFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
super(fieldType, false, fieldDataSettings, indexSettings);
|
||||
this.defaultFieldType = Defaults.FIELD_TYPE;
|
||||
this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
|
||||
this.enabledState = enabledState;
|
||||
if (this.pre13Index) {
|
||||
this.fieldType = this.fieldType.clone();
|
||||
fieldType().setEnabled(false);
|
||||
this.fieldType.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
public boolean enabled() {
|
||||
return pre13Index == false && enabledState.enabled;
|
||||
@Override
|
||||
public FieldNamesFieldType fieldType() {
|
||||
return (FieldNamesFieldType)fieldType;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -240,7 +258,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
if (enabledState.enabled == false) {
|
||||
if (fieldType().isEnabled() == false) {
|
||||
return;
|
||||
}
|
||||
for (ParseContext.Document document : context.docs()) {
|
||||
@ -270,13 +288,13 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
}
|
||||
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
|
||||
|
||||
if (includeDefaults == false && fieldType().equals(Defaults.FIELD_TYPE) && enabledState == Defaults.ENABLED_STATE) {
|
||||
if (includeDefaults == false && fieldType().equals(Defaults.FIELD_TYPE) && fieldType().isEnabled() == Defaults.ENABLED) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
builder.startObject(NAME);
|
||||
if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
|
||||
builder.field("enabled", enabledState.enabled);
|
||||
if (includeDefaults || fieldType().isEnabled() != Defaults.ENABLED) {
|
||||
builder.field("enabled", fieldType().isEnabled());
|
||||
}
|
||||
if (indexCreatedBefore2x && (includeDefaults || fieldType().equals(Defaults.FIELD_TYPE) == false)) {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
@ -290,8 +308,10 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
FieldNamesFieldMapper fieldNamesMapperMergeWith = (FieldNamesFieldMapper)mergeWith;
|
||||
if (!mergeResult.simulate()) {
|
||||
if (fieldNamesMapperMergeWith.enabledState != enabledState && !fieldNamesMapperMergeWith.enabledState.unset()) {
|
||||
this.enabledState = fieldNamesMapperMergeWith.enabledState;
|
||||
if (fieldNamesMapperMergeWith.fieldType().isEnabled() != fieldType().isEnabled()) {
|
||||
this.fieldType = fieldType().clone();
|
||||
fieldType().setEnabled(fieldNamesMapperMergeWith.fieldType().isEnabled());
|
||||
fieldType().freeze();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -185,7 +185,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
|
||||
|
||||
public String value(Document document) {
|
||||
Field field = (Field) document.getField(fieldType.names().indexName());
|
||||
return field == null ? null : (String)value(field);
|
||||
return field == null ? null : (String)fieldType().value(field);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -123,7 +123,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
|
||||
}
|
||||
|
||||
public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
super(fieldType, false, Defaults.NULL_VALUE,
|
||||
super(fieldType, false,
|
||||
Defaults.IGNORE_MALFORMED, Defaults.COERCE, fieldDataSettings,
|
||||
indexSettings, MultiFields.empty(), null);
|
||||
this.enabledState = enabled;
|
||||
|
@ -174,7 +174,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
|
||||
|
||||
protected TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit<Boolean> ignoreMalformed,
|
||||
Explicit<Boolean> coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
super(fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce,
|
||||
super(fieldType, false, ignoreMalformed, coerce,
|
||||
fieldDataSettings, indexSettings, MultiFields.empty(), null);
|
||||
this.enabledState = enabled;
|
||||
this.defaultTTL = defaultTTL;
|
||||
|
@ -251,7 +251,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
|
||||
protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path,
|
||||
String defaultTimestamp, Boolean ignoreMissing, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
@Nullable Settings fieldDataSettings, Settings indexSettings) {
|
||||
super(fieldType, docValues, Defaults.NULL_VALUE, ignoreMalformed, coerce, fieldDataSettings,
|
||||
super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings,
|
||||
indexSettings, MultiFields.empty(), null);
|
||||
this.enabledState = enabledState;
|
||||
this.path = path;
|
||||
|
@ -115,15 +115,10 @@ public class IpFieldMapper extends NumberFieldMapper {
|
||||
builder = this;
|
||||
}
|
||||
|
||||
public Builder nullValue(String nullValue) {
|
||||
this.nullValue = nullValue;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IpFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
IpFieldMapper fieldMapper = new IpFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context),
|
||||
IpFieldMapper fieldMapper = new IpFieldMapper(fieldType, docValues, ignoreMalformed(context), coerce(context),
|
||||
fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
@ -162,7 +157,7 @@ public class IpFieldMapper extends NumberFieldMapper {
|
||||
}
|
||||
}
|
||||
|
||||
static final class IpFieldType extends NumberFieldType {
|
||||
public static final class IpFieldType extends NumberFieldType {
|
||||
|
||||
public IpFieldType() {}
|
||||
|
||||
@ -232,15 +227,12 @@ public class IpFieldMapper extends NumberFieldMapper {
|
||||
}
|
||||
}
|
||||
|
||||
private String nullValue;
|
||||
|
||||
protected IpFieldMapper(MappedFieldType fieldType, Boolean docValues,
|
||||
String nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
|
||||
@Nullable Settings fieldDataSettings,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(fieldType, docValues, ignoreMalformed, coerce,
|
||||
fieldDataSettings, indexSettings, multiFields, copyTo);
|
||||
this.nullValue = nullValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -263,25 +255,17 @@ public class IpFieldMapper extends NumberFieldMapper {
|
||||
return ipToLong(value.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query nullValueFilter() {
|
||||
if (nullValue == null) {
|
||||
return null;
|
||||
}
|
||||
return new ConstantScoreQuery(termQuery(nullValue, null));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
String ipAsString;
|
||||
if (context.externalValueSet()) {
|
||||
ipAsString = (String) context.externalValue();
|
||||
if (ipAsString == null) {
|
||||
ipAsString = nullValue;
|
||||
ipAsString = fieldType().nullValueAsString();
|
||||
}
|
||||
} else {
|
||||
if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
|
||||
ipAsString = nullValue;
|
||||
ipAsString = fieldType().nullValueAsString();
|
||||
} else {
|
||||
ipAsString = context.parser().text();
|
||||
}
|
||||
@ -317,7 +301,8 @@ public class IpFieldMapper extends NumberFieldMapper {
|
||||
return;
|
||||
}
|
||||
if (!mergeResult.simulate()) {
|
||||
this.nullValue = ((IpFieldMapper) mergeWith).nullValue;
|
||||
this.fieldType = this.fieldType.clone();
|
||||
this.fieldType.setNullValue(((IpFieldMapper) mergeWith).fieldType().nullValue());
|
||||
}
|
||||
}
|
||||
|
||||
@ -328,8 +313,8 @@ public class IpFieldMapper extends NumberFieldMapper {
|
||||
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
|
||||
builder.field("precision_step", fieldType.numericPrecisionStep());
|
||||
}
|
||||
if (includeDefaults || nullValue != null) {
|
||||
builder.field("null_value", nullValue);
|
||||
if (includeDefaults || fieldType().nullValueAsString() != null) {
|
||||
builder.field("null_value", fieldType().nullValueAsString());
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("include_in_all", includeInAll);
|
||||
|
@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fieldvisitor.JustSourceFieldsVisitor;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@ -54,7 +55,7 @@ final class QueriesLoaderCollector extends SimpleCollector {
QueriesLoaderCollector(PercolatorQueriesRegistry percolator, ESLogger logger, MapperService mapperService, IndexFieldDataService indexFieldDataService) {
this.percolator = percolator;
this.logger = logger;
final FieldMapper uidMapper = mapperService.smartNameFieldMapper(UidFieldMapper.NAME);
final MappedFieldType uidMapper = mapperService.smartNameFieldType(UidFieldMapper.NAME);
this.uidFieldData = indexFieldDataService.getForField(uidMapper);
}

@ -30,7 +30,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.io.IOException;

@ -161,20 +161,20 @@ public class CommonTermsQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "No text specified for text query");
}
String field;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
field = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
field = fieldName;
}

Analyzer analyzer = null;
if (queryAnalyzer == null) {
if (mapper != null) {
analyzer = mapper.fieldType().searchAnalyzer();
if (fieldType != null) {
analyzer = fieldType.searchAnalyzer();
}
if (analyzer == null && mapper != null) {
analyzer = parseContext.getSearchAnalyzer(mapper);
if (analyzer == null && fieldType != null) {
analyzer = parseContext.getSearchAnalyzer(fieldType);
}
if (analyzer == null) {
analyzer = parseContext.mapperService().searchAnalyzer();
@ -186,7 +186,7 @@ public class CommonTermsQueryParser extends BaseQueryParserTemp {
}
}

ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoords, mapper);
ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoords, fieldType);
commonsQuery.setBoost(boost);
Query query = parseQueryString(commonsQuery, value.toString(), field, parseContext, analyzer, lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch);
if (queryName != null) {
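
Note (editorial illustration, not part of the commit): the parser hunk above shows the shape repeated throughout this change set — ask the parse context for a MappedFieldType, use its index name and search analyzer when the field is mapped, and fall back to the literal field name and the service-wide default analyzer otherwise. A small self-contained sketch of that fallback, using a plain Map and String placeholders rather than real mapper or analyzer types:

import java.util.Map;

// Illustration of the lookup-then-fall-back flow used by the parsers after this refactoring.
final class FieldFallbackSketch {

    static String resolveIndexName(Map<String, String> indexNamesByField, String fieldName) {
        String indexName = indexNamesByField.get(fieldName);
        // unmapped fields keep their literal name, as in the parser above
        return indexName != null ? indexName : fieldName;
    }

    static String resolveAnalyzer(Map<String, String> analyzersByField, String fieldName, String defaultAnalyzer) {
        String analyzer = analyzersByField.get(fieldName);
        // fall back to the default when the mapping defines none for this field
        return analyzer != null ? analyzer : defaultAnalyzer;
    }
}
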
@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;

@ -75,7 +76,7 @@ public class ExistsQueryParser extends BaseQueryParserTemp {
}

public static Query newFilter(QueryParseContext parseContext, String fieldPattern, String queryName) {
final FieldNamesFieldMapper fieldNamesMapper = (FieldNamesFieldMapper)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);

MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern);
if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) {
@ -91,20 +92,20 @@ public class ExistsQueryParser extends BaseQueryParserTemp {

BooleanQuery boolFilter = new BooleanQuery();
for (String field : fields) {
FieldMapper mapper = parseContext.fieldMapper(field);
MappedFieldType fieldType = parseContext.fieldMapper(field);
Query filter = null;
if (fieldNamesMapper!= null && fieldNamesMapper.enabled()) {
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (mapper != null) {
f = mapper.fieldType().names().indexName();
if (fieldType != null) {
f = fieldType.names().indexName();
} else {
f = field;
}
filter = fieldNamesMapper.termQuery(f, parseContext);
filter = fieldNamesFieldType.termQuery(f, parseContext);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && mapper != null) {
filter = mapper.rangeQuery(null, null, true, true, parseContext);
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true, parseContext);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);
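
Note (editorial illustration, not part of the commit): with the enabled flag now carried by FieldNamesFieldType, the exists filter above chooses per field between a fast term query on _field_names and a slow open-ended range. The sketch below reproduces that decision ladder with plain Lucene 5.x queries; the field-type-specific range query used by the real code is simplified to the generic TermRangeQuery fallback.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;

// Illustration of the exists/missing decision ladder shown above.
final class ExistsFilterSketch {

    static Query existsFilter(Iterable<String> fields, boolean fieldNamesEnabled) {
        BooleanQuery bool = new BooleanQuery(); // Lucene 5.x-style mutable BooleanQuery, as in the code above
        for (String field : fields) {
            Query perField;
            if (fieldNamesEnabled) {
                // fast path: _field_names records the name of every field that has a value
                perField = new TermQuery(new Term("_field_names", field));
            } else {
                // slow path: an open-ended range matches any value of the field
                perField = new TermRangeQuery(field, null, null, true, true);
            }
            bool.add(perField, Occur.SHOULD);
        }
        return bool;
    }
}
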
@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -88,9 +89,9 @@ public class FieldMaskingSpanQueryParser extends BaseQueryParserTemp {
|
||||
throw new QueryParsingException(parseContext, "field_masking_span must have [field] set for it");
|
||||
}
|
||||
|
||||
FieldMapper mapper = parseContext.fieldMapper(field);
|
||||
if (mapper != null) {
|
||||
field = mapper.fieldType().names().indexName();
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(field);
|
||||
if (fieldType != null) {
|
||||
field = fieldType.names().indexName();
|
||||
}
|
||||
|
||||
FieldMaskingSpanQuery query = new FieldMaskingSpanQuery(inner, field);
|
||||
|
@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.query.support.QueryParsers;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -113,9 +114,9 @@ public class FuzzyQueryParser extends BaseQueryParserTemp {
|
||||
}
|
||||
|
||||
Query query = null;
|
||||
FieldMapper mapper = parseContext.fieldMapper(fieldName);
|
||||
if (mapper != null) {
|
||||
query = mapper.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType != null) {
|
||||
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
|
||||
}
|
||||
if (query == null) {
|
||||
query = new FuzzyQuery(new Term(fieldName, value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);
|
||||
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
|
||||
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
|
||||
@ -159,20 +160,20 @@ public class GeoBoundingBoxQueryParser extends BaseQueryParserTemp {
|
||||
}
|
||||
}
|
||||
|
||||
FieldMapper mapper = parseContext.fieldMapper(fieldName);
|
||||
if (mapper == null) {
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
|
||||
}
|
||||
if (!(mapper instanceof GeoPointFieldMapper)) {
|
||||
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
|
||||
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
|
||||
}
|
||||
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
|
||||
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
|
||||
|
||||
Query filter;
|
||||
if ("indexed".equals(type)) {
|
||||
filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoMapper);
|
||||
filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoFieldType);
|
||||
} else if ("memory".equals(type)) {
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
|
||||
filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "geo bounding box type [" + type
|
||||
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
||||
|
||||
@ -145,18 +146,18 @@ public class GeoDistanceQueryParser extends BaseQueryParserTemp {
|
||||
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
|
||||
}
|
||||
|
||||
FieldMapper mapper = parseContext.fieldMapper(fieldName);
|
||||
if (mapper == null) {
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
|
||||
}
|
||||
if (!(mapper instanceof GeoPointFieldMapper)) {
|
||||
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
|
||||
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
|
||||
}
|
||||
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
|
||||
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
|
||||
|
||||
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
|
||||
Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoMapper, indexFieldData, optimizeBbox);
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
|
||||
Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
|
||||
if (queryName != null) {
|
||||
parseContext.addNamedQuery(queryName, query);
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
||||
|
||||
@ -185,17 +186,17 @@ public class GeoDistanceRangeQueryParser extends BaseQueryParserTemp {
|
||||
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
|
||||
}
|
||||
|
||||
FieldMapper mapper = parseContext.fieldMapper(fieldName);
|
||||
if (mapper == null) {
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
|
||||
}
|
||||
if (!(mapper instanceof GeoPointFieldMapper)) {
|
||||
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
|
||||
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
|
||||
}
|
||||
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
|
||||
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
|
||||
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
|
||||
Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoMapper, indexFieldData, optimizeBbox);
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
|
||||
Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
|
||||
if (queryName != null) {
|
||||
parseContext.addNamedQuery(queryName, query);
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
|
||||
|
||||
@ -135,15 +136,15 @@ public class GeoPolygonQueryParser extends BaseQueryParserTemp {
|
||||
}
|
||||
}
|
||||
|
||||
FieldMapper mapper = parseContext.fieldMapper(fieldName);
|
||||
if (mapper == null) {
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
|
||||
}
|
||||
if (!(mapper instanceof GeoPointFieldMapper)) {
|
||||
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
|
||||
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
|
||||
}
|
||||
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
|
||||
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
|
||||
Query query = new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
|
||||
if (queryName != null) {
|
||||
parseContext.addNamedQuery(queryName, query);
|
||||
|
@ -32,6 +32,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
|
||||
import org.elasticsearch.index.search.shape.ShapeFetchService;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
@ -136,21 +137,21 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
|
||||
throw new QueryParsingException(parseContext, "No Shape Relation defined");
|
||||
}
|
||||
|
||||
FieldMapper fieldMapper = parseContext.fieldMapper(fieldName);
|
||||
if (fieldMapper == null) {
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "Failed to find geo_shape field [" + fieldName + "]");
|
||||
}
|
||||
|
||||
// TODO: This isn't the nicest way to check this
|
||||
if (!(fieldMapper instanceof GeoShapeFieldMapper)) {
|
||||
if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) {
|
||||
throw new QueryParsingException(parseContext, "Field [" + fieldName + "] is not a geo_shape");
|
||||
}
|
||||
|
||||
GeoShapeFieldMapper shapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
|
||||
GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
|
||||
|
||||
PrefixTreeStrategy strategy = shapeFieldMapper.fieldType().defaultStrategy();
|
||||
PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy();
|
||||
if (strategyName != null) {
|
||||
strategy = shapeFieldMapper.fieldType().resolveStrategy(strategyName);
|
||||
strategy = shapeFieldType.resolveStrategy(strategyName);
|
||||
}
|
||||
Query query;
|
||||
if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) {
|
||||
|
@ -66,13 +66,13 @@ public class GeohashCellQuery {
|
||||
* returns a boolean filter combining the geohashes OR-wise.
|
||||
*
|
||||
* @param context Context of the filter
|
||||
* @param fieldMapper field mapper for geopoints
|
||||
* @param fieldType field mapper for geopoints
|
||||
* @param geohash mandatory geohash
|
||||
* @param geohashes optional array of additional geohashes
|
||||
* @return a new GeoBoundinboxfilter
|
||||
*/
|
||||
public static Query create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, @Nullable List<CharSequence> geohashes) {
|
||||
MappedFieldType geoHashMapper = fieldMapper.fieldType().geohashFieldType();
|
||||
public static Query create(QueryParseContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
|
||||
MappedFieldType geoHashMapper = fieldType.geohashFieldType();
|
||||
if (geoHashMapper == null) {
|
||||
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
|
||||
}
|
||||
@ -243,17 +243,17 @@ public class GeohashCellQuery {
|
||||
throw new QueryParsingException(parseContext, "no geohash value provided to geohash_cell filter");
|
||||
}
|
||||
|
||||
FieldMapper mapper = parseContext.fieldMapper(fieldName);
|
||||
if (mapper == null) {
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
|
||||
}
|
||||
|
||||
if (!(mapper instanceof GeoPointFieldMapper)) {
|
||||
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
|
||||
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
|
||||
}
|
||||
|
||||
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
|
||||
if (!geoMapper.fieldType().isGeohashPrefixEnabled()) {
|
||||
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
|
||||
if (!geoFieldType.isGeohashPrefixEnabled()) {
|
||||
throw new QueryParsingException(parseContext, "can't execute geohash_cell on field [" + fieldName
|
||||
+ "], geohash_prefix is not enabled");
|
||||
}
|
||||
@ -265,9 +265,9 @@ public class GeohashCellQuery {
|
||||
|
||||
Query filter;
|
||||
if (neighbors) {
|
||||
filter = create(parseContext, geoMapper, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
|
||||
filter = create(parseContext, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
|
||||
} else {
|
||||
filter = create(parseContext, geoMapper, geohash, null);
|
||||
filter = create(parseContext, geoFieldType, geohash, null);
|
||||
}
|
||||
|
||||
return filter;
|
||||
|
@ -175,7 +175,7 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
|
||||
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());
|
||||
|
||||
final Query query;
|
||||
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
|
||||
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
|
||||
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
|
||||
query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
|
||||
} else {
|
||||
|
@ -165,7 +165,7 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper.active()) {
|
||||
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
|
||||
parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
|
||||
parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
|
||||
if (parentTypeDocumentMapper == null) {
|
||||
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
|
||||
parentTypes.add(parentFieldMapper.type());
|
||||
|
@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
|
||||
|
||||
@ -88,7 +89,7 @@ public class MissingQueryParser extends BaseQueryParserTemp {
|
||||
throw new QueryParsingException(parseContext, "missing must have either existence, or null_value, or both set to true");
|
||||
}
|
||||
|
||||
final FieldNamesFieldMapper fieldNamesMapper = (FieldNamesFieldMapper)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
|
||||
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
|
||||
MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern);
|
||||
if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) {
|
||||
// automatic make the object mapper pattern
|
||||
@ -110,20 +111,20 @@ public class MissingQueryParser extends BaseQueryParserTemp {
|
||||
if (existence) {
|
||||
BooleanQuery boolFilter = new BooleanQuery();
|
||||
for (String field : fields) {
|
||||
FieldMapper mapper = parseContext.fieldMapper(field);
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(field);
|
||||
Query filter = null;
|
||||
if (fieldNamesMapper != null && fieldNamesMapper.enabled()) {
|
||||
if (fieldNamesFieldType.isEnabled()) {
|
||||
final String f;
|
||||
if (mapper != null) {
|
||||
f = mapper.fieldType().names().indexName();
|
||||
if (fieldType != null) {
|
||||
f = fieldType.names().indexName();
|
||||
} else {
|
||||
f = field;
|
||||
}
|
||||
filter = fieldNamesMapper.termQuery(f, parseContext);
|
||||
filter = fieldNamesFieldType.termQuery(f, parseContext);
|
||||
}
|
||||
// if _field_names are not indexed, we need to go the slow way
|
||||
if (filter == null && mapper != null) {
|
||||
filter = mapper.rangeQuery(null, null, true, true, parseContext);
|
||||
if (filter == null && fieldType != null) {
|
||||
filter = fieldType.rangeQuery(null, null, true, true, parseContext);
|
||||
}
|
||||
if (filter == null) {
|
||||
filter = new TermRangeQuery(field, null, null, true, true);
|
||||
@ -137,9 +138,9 @@ public class MissingQueryParser extends BaseQueryParserTemp {
|
||||
|
||||
if (nullValue) {
|
||||
for (String field : fields) {
|
||||
FieldMapper mapper = parseContext.fieldMapper(field);
|
||||
if (mapper != null) {
|
||||
nullFilter = mapper.nullValueFilter();
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(field);
|
||||
if (fieldType != null) {
|
||||
nullFilter = fieldType.nullValueQuery();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -37,7 +37,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.Analysis;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService;
import org.elasticsearch.search.internal.SearchContext;

@ -165,8 +165,8 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
moreLikeFields = Lists.newLinkedList();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String field = parser.text();
FieldMapper mapper = parseContext.fieldMapper(field);
moreLikeFields.add(mapper == null ? field : mapper.fieldType().names().indexName());
MappedFieldType fieldType = parseContext.fieldMapper(field);
moreLikeFields.add(fieldType == null ? field : fieldType.names().indexName());
}
} else if (Fields.DOCUMENT_IDS.match(currentFieldName, parseContext.parseFlags())) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {

@ -26,7 +26,7 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;

import java.io.IOException;

@ -98,9 +98,9 @@ public class PrefixQueryParser extends BaseQueryParserTemp {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewriteMethod, null);

Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
query = mapper.prefixQuery(value, method, parseContext);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.prefixQuery(value, method, parseContext);
}
if (query == null) {
PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value)));
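The PrefixQueryParser hunk follows the same shape: try the field type's prefixQuery first, then fall back to a raw Lucene PrefixQuery on the encoded value. A minimal sketch of that fallback, assuming the field name, value and rewrite method are already parsed (the wrapper class is illustrative):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.MultiTermQuery;
    import org.apache.lucene.search.PrefixQuery;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.common.lucene.BytesRefs;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.query.QueryParseContext;

    final class PrefixSketch {
        static Query prefix(QueryParseContext parseContext, String fieldName, String value, MultiTermQuery.RewriteMethod method) {
            Query query = null;
            MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
            if (fieldType != null) {
                // lets the field type pick the right indexed representation
                query = fieldType.prefixQuery(value, method, parseContext);
            }
            if (query == null) {
                // unmapped field: build the prefix query directly on the raw bytes
                query = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value)));
            }
            return query;
        }
    }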
@ -21,7 +21,6 @@ package org.elasticsearch.index.query;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.index.LeafReaderContext;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryparser.classic.MapperQueryParser;

@ -179,7 +178,7 @@ public class QueryParseContext {
return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter);
}

public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper mapper) {
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
return indexQueryParser.fieldDataService.getForField(mapper);
}

@ -297,8 +296,8 @@ public class QueryParseContext {
return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes());
}

public FieldMapper fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldMapper(name, getTypes()));
public MappedFieldType fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes()));
}

public MapperService.SmartNameObjectMapper smartObjectMapper(String name) {

@ -308,9 +307,9 @@ public class QueryParseContext {
/** Gets the search analyzer for the given field, or the default if there is none present for the field
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchAnalyzer(FieldMapper mapper) {
if (mapper.fieldType().searchAnalyzer() != null) {
return mapper.fieldType().searchAnalyzer();
public Analyzer getSearchAnalyzer(MappedFieldType fieldType) {
if (fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer();
}
return mapperService().searchAnalyzer();
}

@ -318,9 +317,9 @@ public class QueryParseContext {
/** Gets the search quote analyzer for the given field, or the default if there is none present for the field
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchQuoteAnalyzer(FieldMapper mapper) {
if (mapper.fieldType().searchQuoteAnalyzer() != null) {
return mapper.fieldType().searchQuoteAnalyzer();
public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) {
if (fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer();
}
return mapperService().searchQuoteAnalyzer();
}

@ -333,15 +332,14 @@ public class QueryParseContext {
this.mapUnmappedFieldAsString = mapUnmappedFieldAsString;
}

private FieldMapper failIfFieldMappingNotFound(String name, FieldMapper fieldMapping) {
private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
if (allowUnmappedFields) {
return fieldMapping;
} else if (mapUnmappedFieldAsString){
StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
// it would be better to pass the real index settings, but they are not easily accessible from here...
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build();
StringFieldMapper stringFieldMapper = builder.build(new Mapper.BuilderContext(settings, new ContentPath(1)));
return stringFieldMapper;
return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType();
} else {
Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
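The two analyzer accessors in QueryParseContext now take the field type directly and fall back to the index-wide default when the field does not declare its own analyzer. A minimal sketch of that default-analyzer lookup, where indexDefault stands in for mapperService().searchAnalyzer() (the method wrapper is illustrative):

    import org.apache.lucene.analysis.Analyzer;
    import org.elasticsearch.index.mapper.MappedFieldType;

    // Prefer the field's own search analyzer, otherwise use the index-wide default.
    Analyzer searchAnalyzerFor(MappedFieldType fieldType, Analyzer indexDefault) {
        if (fieldType != null && fieldType.searchAnalyzer() != null) {
            return fieldType.searchAnalyzer();
        }
        return indexDefault;
    }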
@ -28,7 +28,7 @@ import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.joda.time.DateTimeZone;

@ -281,9 +281,9 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder<RangeQueryBuilder>
@Override
public Query toQuery(QueryParseContext parseContext) throws QueryParsingException, IOException {
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(this.fieldName);
MappedFieldType mapper = parseContext.fieldMapper(this.fieldName);
if (mapper != null) {
if (mapper instanceof DateFieldMapper) {
if (mapper instanceof DateFieldMapper.DateFieldType) {
DateMathParser forcedDateParser = null;
if (this.format != null) {
forcedDateParser = new DateMathParser(Joda.forPattern(this.format));

@ -292,7 +292,7 @@ public class RangeQueryBuilder extends MultiTermQueryBuilder<RangeQueryBuilder>
if (this.timeZone != null) {
dateTimeZone = DateTimeZone.forID(this.timeZone);
}
query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, dateTimeZone, forcedDateParser, parseContext);
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, dateTimeZone, forcedDateParser, parseContext);
} else {
if (timeZone != null) {
throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field ["
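RangeQueryBuilder now keys its date handling off the field type rather than the mapper class: only DateFieldType accepts a time zone and a forced date parser. A minimal sketch of that dispatch, assuming from/to and the flags are already known (the helper method is illustrative):

    import org.apache.lucene.search.Query;
    import org.elasticsearch.common.joda.DateMathParser;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.core.DateFieldMapper;
    import org.elasticsearch.index.query.QueryParseContext;
    import org.joda.time.DateTimeZone;

    // Illustrative dispatch: date fields take the extended signature, everything else the plain one.
    Query rangeFor(MappedFieldType fieldType, Object from, Object to, boolean includeLower, boolean includeUpper,
                   DateTimeZone timeZone, DateMathParser dateParser, QueryParseContext parseContext) {
        if (fieldType instanceof DateFieldMapper.DateFieldType) {
            return ((DateFieldMapper.DateFieldType) fieldType)
                    .rangeQuery(from, to, includeLower, includeUpper, timeZone, dateParser, parseContext);
        }
        // non-date fields use the plain signature; the real parser rejects a time_zone here
        return fieldType.rangeQuery(from, to, includeLower, includeUpper, parseContext);
    }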
@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;

import java.io.IOException;

@ -106,9 +107,9 @@ public class RegexpQueryParser extends BaseQueryParserTemp {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewriteMethod, null);

Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
query = mapper.regexpQuery(value, flagsValue, maxDeterminizedStates, method, parseContext);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.regexpQuery(value, flagsValue, maxDeterminizedStates, method, parseContext);
}
if (query == null) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), flagsValue, maxDeterminizedStates);

@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.io.IOException;
import java.util.Collections;

@ -128,9 +129,9 @@ public class SimpleQueryStringParser extends BaseQueryParserTemp {
fieldsAndWeights.put(fieldName, fBoost);
}
} else {
FieldMapper mapper = parseContext.fieldMapper(fField);
if (mapper != null) {
fieldsAndWeights.put(mapper.fieldType().names().indexName(), fBoost);
MappedFieldType fieldType = parseContext.fieldMapper(fField);
if (fieldType != null) {
fieldsAndWeights.put(fieldType.names().indexName(), fBoost);
} else {
fieldsAndWeights.put(fField, fBoost);
}
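SimpleQueryStringParser resolves each requested field to its index name before storing the boost. A minimal sketch of that resolution step, mirroring the hunk above (the method wrapper is illustrative):

    import java.util.Map;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.query.QueryParseContext;

    // Resolve a user-supplied field name to the name actually stored in the index, keeping the boost.
    void addFieldWithBoost(QueryParseContext parseContext, Map<String, Float> fieldsAndWeights, String field, float boost) {
        MappedFieldType fieldType = parseContext.fieldMapper(field);
        if (fieldType != null) {
            fieldsAndWeights.put(fieldType.names().indexName(), boost); // mapped: use the index name
        } else {
            fieldsAndWeights.put(field, boost);                         // unmapped: keep the raw name
        }
    }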
@ -24,7 +24,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;

/**
* A Span Query that matches documents containing a term.

@ -69,9 +69,9 @@ public class SpanTermQueryBuilder extends BaseTermQueryBuilder<SpanTermQueryBuil
public Query toQuery(QueryParseContext context) {
BytesRef valueBytes = null;
String fieldName = this.fieldName;
FieldMapper mapper = context.fieldMapper(fieldName);
MappedFieldType mapper = context.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.fieldType().names().indexName();
fieldName = mapper.names().indexName();
valueBytes = mapper.indexedValueForSearch(value);
}
if (valueBytes == null) {

@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.io.IOException;

@ -23,7 +23,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;

/**
* A Query that matches documents containing a term.

@ -71,7 +71,7 @@ public class TermQueryBuilder extends BaseTermQueryBuilder<TermQueryBuilder> imp
@Override
public Query toQuery(QueryParseContext parseContext) {
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(this.fieldName);
MappedFieldType mapper = parseContext.fieldMapper(this.fieldName);
if (mapper != null) {
query = mapper.termQuery(this.value, parseContext);
}
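SpanTermQueryBuilder shows the same resolution for span queries: translate the field name and value through the field type, then fall back to the raw bytes. A minimal sketch under those assumptions; the final SpanTermQuery construction is an assumption about the fallback, inferred from the imports in the hunk rather than shown by it:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.spans.SpanTermQuery;
    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.common.lucene.BytesRefs;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.query.QueryParseContext;

    // Build a span term query, preferring the mapped field's index name and indexed value encoding.
    SpanTermQuery spanTerm(QueryParseContext context, String fieldName, Object value) {
        BytesRef valueBytes = null;
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType != null) {
            fieldName = fieldType.names().indexName();
            valueBytes = fieldType.indexedValueForSearch(value);
        }
        if (valueBytes == null) {
            valueBytes = BytesRefs.toBytesRef(value); // unmapped: encode the raw value
        }
        return new SpanTermQuery(new Term(fieldName, valueBytes));
    }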
@ -37,6 +37,7 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.indices.cache.filter.terms.TermsLookup;
import org.elasticsearch.search.internal.SearchContext;

@ -157,9 +158,9 @@ public class TermsQueryParser extends BaseQueryParserTemp {
throw new QueryParsingException(parseContext, "terms query requires a field name, followed by array of terms");
}

FieldMapper fieldMapper = parseContext.fieldMapper(fieldName);
if (fieldMapper != null) {
fieldName = fieldMapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
fieldName = fieldType.names().indexName();
}

if (lookupId != null) {

@ -179,8 +180,8 @@ public class TermsQueryParser extends BaseQueryParserTemp {

Query query;
if (parseContext.isFilter()) {
if (fieldMapper != null) {
query = fieldMapper.termsQuery(terms, parseContext);
if (fieldType != null) {
query = fieldType.termsQuery(terms, parseContext);
} else {
BytesRef[] filterValues = new BytesRef[terms.size()];
for (int i = 0; i < filterValues.length; i++) {

@ -191,8 +192,8 @@ public class TermsQueryParser extends BaseQueryParserTemp {
} else {
BooleanQuery bq = new BooleanQuery();
for (Object term : terms) {
if (fieldMapper != null) {
bq.add(fieldMapper.termQuery(term, parseContext), Occur.SHOULD);
if (fieldType != null) {
bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD);
} else {
bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD);
}
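In the non-filter branch the terms query is expanded into a BooleanQuery of SHOULD clauses, one per term, again preferring the field type's termQuery. A minimal sketch of that expansion, with the term list and context assumed to be already parsed:

    import java.util.List;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;
    import org.elasticsearch.common.lucene.BytesRefs;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.query.QueryParseContext;

    // One SHOULD clause per term: a document matches if it contains any of the terms.
    BooleanQuery termsAsBoolean(QueryParseContext parseContext, MappedFieldType fieldType, String fieldName, List<Object> terms) {
        BooleanQuery bq = new BooleanQuery();
        for (Object term : terms) {
            if (fieldType != null) {
                bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD);
            } else {
                bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD);
            }
        }
        return bq;
    }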
@ -26,6 +26,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;

import java.io.IOException;

@ -91,10 +92,10 @@ public class WildcardQueryParser extends BaseQueryParserTemp {
}

BytesRef valueBytes;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.fieldType().names().indexName();
valueBytes = mapper.indexedValueForSearch(value);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
fieldName = fieldType.names().indexName();
valueBytes = fieldType.indexedValueForSearch(value);
} else {
valueBytes = new BytesRef(value);
}
@ -39,7 +39,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

@ -151,27 +151,27 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {

// now, the field must exist, else we cannot read the value for
// the doc later
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "Unknown field [" + fieldName + "]");
}

// dates and time need special handling
parser.nextToken();
if (mapper instanceof DateFieldMapper) {
return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper) mapper, mode);
} else if (mapper instanceof GeoPointFieldMapper) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper) mapper, mode);
} else if (mapper instanceof NumberFieldMapper) {
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper) mapper, mode);
if (fieldType instanceof DateFieldMapper.DateFieldType) {
return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper.DateFieldType) fieldType, mode);
} else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
} else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode);
} else {
throw new QueryParsingException(parseContext, "Field " + fieldName + " is of type " + mapper.fieldType()
throw new QueryParsingException(parseContext, "Field " + fieldName + " is of type " + fieldType
+ ", but only numeric types are supported.");
}
}

private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
NumberFieldMapper mapper, MultiValueMode mode) throws IOException {
NumberFieldMapper.NumberFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
double scale = 0;

@ -201,12 +201,12 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
throw new ElasticsearchParseException("Both " + DecayFunctionBuilder.SCALE + " and " + DecayFunctionBuilder.ORIGIN
+ " must be set for numeric fields.");
}
IndexNumericFieldData numericFieldData = parseContext.getForField(mapper);
IndexNumericFieldData numericFieldData = parseContext.getForField(fieldType);
return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}

private AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
GeoPointFieldMapper mapper, MultiValueMode mode) throws IOException {
GeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
GeoPoint origin = new GeoPoint();

@ -233,13 +233,13 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}
double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);

}

private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
DateFieldMapper dateFieldMapper, MultiValueMode mode) throws IOException {
DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
String scaleString = null;

@ -263,7 +263,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}
long origin = SearchContext.current().nowInMillis();
if (originString != null) {
origin = dateFieldMapper.fieldType().parseToMilliseconds(originString, false, null, null);
origin = dateFieldType.parseToMilliseconds(originString, false, null, null);
}

if (scaleString == null) {

@ -273,7 +273,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
double scale = val.getMillis();
val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset");
double offset = val.getMillis();
IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldMapper);
IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldType);
return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}
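DecayFunctionParser now branches on the concrete MappedFieldType subclass instead of the mapper class, because dates, geo points and numbers each need their own origin/scale parsing. A minimal sketch of just that dispatch decision (the parse methods themselves are treated as opaque):

    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.core.DateFieldMapper;
    import org.elasticsearch.index.mapper.core.NumberFieldMapper;
    import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

    // Dates, geo points and numbers each get their own origin/scale parsing; anything else is rejected.
    String decayVariableKind(MappedFieldType fieldType) {
        if (fieldType instanceof DateFieldMapper.DateFieldType) {
            return "date";        // origin parsed with parseToMilliseconds, scale as a TimeValue
        } else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) {
            return "geo_point";   // origin is a GeoPoint, scale a DistanceUnit value
        } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
            return "number";      // origin and scale are plain doubles
        }
        throw new IllegalArgumentException("only numeric, date and geo_point fields are supported");
    }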
@ -25,6 +25,7 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;

@ -84,12 +85,12 @@ public class FieldValueFactorFunctionParser implements ScoreFunctionParser {
}

SearchContext searchContext = SearchContext.current();
FieldMapper mapper = searchContext.mapperService().smartNameFieldMapper(field);
if (mapper == null) {
MappedFieldType fieldType = searchContext.mapperService().smartNameFieldType(field);
if (fieldType == null) {
throw new ElasticsearchException("Unable to find a field mapper for field [" + field + "]");
}
return new FieldValueFactorFunction(field, boostFactor, modifier, missing,
(IndexNumericFieldData)searchContext.fieldData().getForField(mapper));
(IndexNumericFieldData)searchContext.fieldData().getForField(fieldType));
}

@Override

@ -28,6 +28,7 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;

@ -82,8 +83,8 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
}
}

final FieldMapper mapper = SearchContext.current().mapperService().smartNameFieldMapper("_uid");
if (mapper == null) {
final MappedFieldType fieldType = SearchContext.current().mapperService().smartNameFieldType("_uid");
if (fieldType == null) {
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
return new RandomScoreFunction();
}

@ -93,7 +94,7 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
}
final ShardId shardId = SearchContext.current().indexShard().shardId();
final int salt = (shardId.index().name().hashCode() << 10) | shardId.id();
final IndexFieldData<?> uidFieldData = SearchContext.current().fieldData().getForField(mapper);
final IndexFieldData<?> uidFieldData = SearchContext.current().fieldData().getForField(fieldType);

return new RandomScoreFunction(seed, salt, uidFieldData);
}
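The random score function mixes the shard identity into the scoring so that documents on different shards do not all receive the same pseudo-random sequence. A one-line sketch of the salt computation shown in the hunk above, with indexName and shardId standing in for the ShardId accessors:

    // Salt combines the index name hash (shifted) with the shard id, as in the hunk above.
    int salt(String indexName, int shardId) {
        return (indexName.hashCode() << 10) | shardId;
    }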
@ -30,6 +30,7 @@ import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.QueryParsers;

@ -138,10 +139,10 @@ public class MatchQuery {
return false;
}

protected Analyzer getAnalyzer(FieldMapper mapper) {
protected Analyzer getAnalyzer(MappedFieldType fieldType) {
if (this.analyzer == null) {
if (mapper != null) {
return parseContext.getSearchAnalyzer(mapper);
if (fieldType != null) {
return parseContext.getSearchAnalyzer(fieldType);
}
return parseContext.mapperService().searchAnalyzer();
} else {

@ -155,16 +156,16 @@ public class MatchQuery {

public Query parse(Type type, String fieldName, Object value) throws IOException {
final String field;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
field = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
field = fieldName;
}

if (mapper != null && mapper.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) {
if (fieldType != null && fieldType.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) {
try {
return mapper.termQuery(value, parseContext);
return fieldType.termQuery(value, parseContext);
} catch (RuntimeException e) {
if (lenient) {
return null;

@ -173,9 +174,9 @@ public class MatchQuery {
}

}
Analyzer analyzer = getAnalyzer(mapper);
Analyzer analyzer = getAnalyzer(fieldType);
assert analyzer != null;
MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, mapper);
MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
builder.setEnablePositionIncrements(this.enablePositionIncrements);

Query query = null;

@ -184,7 +185,7 @@ public class MatchQuery {
if (commonTermsCutoff == null) {
query = builder.createBooleanQuery(field, value.toString(), occur);
} else {
query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff, mapper);
query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff, fieldType);
}
break;
case PHRASE:

@ -210,11 +211,11 @@ public class MatchQuery {

private class MatchQueryBuilder extends QueryBuilder {

private final FieldMapper mapper;
private final MappedFieldType mapper;
/**
* Creates a new QueryBuilder using the given analyzer.
*/
public MatchQueryBuilder(Analyzer analyzer, @Nullable FieldMapper mapper) {
public MatchQueryBuilder(Analyzer analyzer, @Nullable MappedFieldType mapper) {
super(analyzer);
this.mapper = mapper;
}

@ -253,11 +254,11 @@ public class MatchQuery {
return query;
}

public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, FieldMapper mapper) {
public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) {
Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur);
if (booleanQuery != null && booleanQuery instanceof BooleanQuery) {
BooleanQuery bq = (BooleanQuery) booleanQuery;
ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ((BooleanQuery)booleanQuery).isCoordDisabled(), mapper);
ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ((BooleanQuery)booleanQuery).isCoordDisabled(), fieldType);
for (BooleanClause clause : bq.clauses()) {
if (!(clause.getQuery() instanceof TermQuery)) {
return booleanQuery;

@ -271,10 +272,10 @@ public class MatchQuery {
}
}

protected Query blendTermQuery(Term term, FieldMapper mapper) {
protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
if (fuzziness != null) {
if (mapper != null) {
Query query = mapper.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
if (fieldType != null) {
Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
if (query instanceof FuzzyQuery) {
QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
}

@ -284,8 +285,8 @@ public class MatchQuery {
QueryParsers.setRewriteMethod(query, rewriteMethod);
return query;
}
if (mapper != null) {
Query termQuery = mapper.queryStringTermQuery(term);
if (fieldType != null) {
Query termQuery = fieldType.queryStringTermQuery(term);
if (termQuery != null) {
return termQuery;
}
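blendTermQuery establishes a precedence when turning a single analyzed term into a query: a fuzzy query if fuzziness was requested, then the field type's own query-string term query, then a plain term query. A minimal sketch of that precedence, with the rewrite-method wiring left out; the final plain TermQuery default is an assumption standing in for the builder's ordinary term query, not a line shown in the diff:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;
    import org.elasticsearch.common.unit.Fuzziness;
    import org.elasticsearch.index.mapper.MappedFieldType;

    // Illustrative precedence; the real method also applies fuzzy and multi-term rewrite methods.
    Query blend(Term term, MappedFieldType fieldType, Fuzziness fuzziness,
                int prefixLength, int maxExpansions, boolean transpositions) {
        if (fuzziness != null && fieldType != null) {
            return fieldType.fuzzyQuery(term.text(), fuzziness, prefixLength, maxExpansions, transpositions);
        }
        if (fieldType != null) {
            Query termQuery = fieldType.queryStringTermQuery(term);
            if (termQuery != null) {
                return termQuery;
            }
        }
        return new TermQuery(term); // assumed default when the field type has no special handling
    }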
@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;

@ -140,8 +141,8 @@ public class MultiMatchQuery extends MatchQuery {
}
}

public Query blendTerm(Term term, FieldMapper mapper) {
return MultiMatchQuery.super.blendTermQuery(term, mapper);
public Query blendTerm(Term term, MappedFieldType fieldType) {
return MultiMatchQuery.super.blendTermQuery(term, fieldType);
}

public boolean forceAnalyzeQueryString() {

@ -150,7 +151,7 @@ public class MultiMatchQuery extends MatchQuery {
}

public class CrossFieldsQueryBuilder extends QueryBuilder {
private FieldAndMapper[] blendedFields;
private FieldAndFieldType[] blendedFields;

public CrossFieldsQueryBuilder(float tieBreaker) {
super(false, tieBreaker);

@ -158,20 +159,20 @@ public class MultiMatchQuery extends MatchQuery {

@Override
public List<Query> buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
Map<Analyzer, List<FieldAndMapper>> groups = new HashMap<>();
Map<Analyzer, List<FieldAndFieldType>> groups = new HashMap<>();
List<Tuple<String, Float>> missing = new ArrayList<>();
for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
String name = entry.getKey();
FieldMapper mapper = parseContext.fieldMapper(name);
if (mapper != null) {
Analyzer actualAnalyzer = getAnalyzer(mapper);
name = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(name);
if (fieldType != null) {
Analyzer actualAnalyzer = getAnalyzer(fieldType);
name = fieldType.names().indexName();
if (!groups.containsKey(actualAnalyzer)) {
groups.put(actualAnalyzer, new ArrayList<FieldAndMapper>());
groups.put(actualAnalyzer, new ArrayList<FieldAndFieldType>());
}
Float boost = entry.getValue();
boost = boost == null ? Float.valueOf(1.0f) : boost;
groups.get(actualAnalyzer).add(new FieldAndMapper(name, mapper, boost));
groups.get(actualAnalyzer).add(new FieldAndFieldType(name, fieldType, boost));
} else {
missing.add(new Tuple(name, entry.getValue()));
}

@ -184,18 +185,18 @@ public class MultiMatchQuery extends MatchQuery {
queries.add(q);
}
}
for (List<FieldAndMapper> group : groups.values()) {
for (List<FieldAndFieldType> group : groups.values()) {
if (group.size() > 1) {
blendedFields = new FieldAndMapper[group.size()];
blendedFields = new FieldAndFieldType[group.size()];
int i = 0;
for (FieldAndMapper fieldAndMapper : group) {
blendedFields[i++] = fieldAndMapper;
for (FieldAndFieldType fieldAndFieldType : group) {
blendedFields[i++] = fieldAndFieldType;
}
} else {
blendedFields = null;
}
final FieldAndMapper fieldAndMapper= group.get(0);
Query q = parseGroup(type.matchQueryType(), fieldAndMapper.field, fieldAndMapper.boost, value, minimumShouldMatch);
final FieldAndFieldType fieldAndFieldType = group.get(0);
Query q = parseGroup(type.matchQueryType(), fieldAndFieldType.field, fieldAndFieldType.boost, value, minimumShouldMatch);
if (q != null) {
queries.add(q);
}

@ -210,9 +211,9 @@ public class MultiMatchQuery extends MatchQuery {
}

@Override
public Query blendTerm(Term term, FieldMapper mapper) {
public Query blendTerm(Term term, MappedFieldType fieldType) {
if (blendedFields == null) {
return super.blendTerm(term, mapper);
return super.blendTerm(term, fieldType);
}
final Term[] terms = new Term[blendedFields.length];
float[] blendedBoost = new float[blendedFields.length];

@ -232,28 +233,28 @@ public class MultiMatchQuery extends MatchQuery {
}

@Override
protected Query blendTermQuery(Term term, FieldMapper mapper) {
protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
if (queryBuilder == null) {
return super.blendTermQuery(term, mapper);
return super.blendTermQuery(term, fieldType);
}
return queryBuilder.blendTerm(term, mapper);
return queryBuilder.blendTerm(term, fieldType);
}

private static final class FieldAndMapper {
private static final class FieldAndFieldType {
final String field;
final FieldMapper mapper;
final MappedFieldType fieldType;
final float boost;

private FieldAndMapper(String field, FieldMapper mapper, float boost) {
private FieldAndFieldType(String field, MappedFieldType fieldType, float boost) {
this.field = field;
this.mapper = mapper;
this.fieldType = fieldType;
this.boost = boost;
}

public Term newTerm(String value) {
try {
final BytesRef bytesRef = mapper.indexedValueForSearch(value);
final BytesRef bytesRef = fieldType.indexedValueForSearch(value);
return new Term(field, bytesRef);
} catch (Exception ex) {
// we can't parse it just use the incoming value -- it will
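The cross-fields builder groups the requested fields by their resolved search analyzer, so that only fields analyzed the same way are blended into one query. A minimal sketch of that grouping step, with a plain map of resolved field types standing in for the FieldAndFieldType holder introduced above:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.lucene.analysis.Analyzer;
    import org.elasticsearch.index.mapper.MappedFieldType;

    // Fields sharing an analyzer are collected into one group and later blended into a single query.
    Map<Analyzer, List<MappedFieldType>> groupByAnalyzer(Map<String, MappedFieldType> resolvedFields,
                                                         Analyzer defaultAnalyzer) {
        Map<Analyzer, List<MappedFieldType>> groups = new HashMap<>();
        for (MappedFieldType fieldType : resolvedFields.values()) {
            Analyzer analyzer = fieldType.searchAnalyzer() != null ? fieldType.searchAnalyzer() : defaultAnalyzer;
            if (!groups.containsKey(analyzer)) {
                groups.put(analyzer, new ArrayList<MappedFieldType>());
            }
            groups.get(analyzer).add(fieldType);
        }
        return groups;
    }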
@ -58,7 +58,7 @@ public class GeoDistanceRangeQuery extends Query {

private final IndexGeoPointFieldData indexFieldData;

public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapper mapper, IndexGeoPointFieldData indexFieldData,
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapper.GeoPointFieldType fieldType, IndexGeoPointFieldData indexFieldData,
String optimizeBbox) {
this.lat = point.lat();
this.lon = point.lon();

@ -91,7 +91,7 @@ public class GeoDistanceRangeQuery extends Query {
if ("memory".equals(optimizeBbox)) {
boundingBoxFilter = null;
} else if ("indexed".equals(optimizeBbox)) {
boundingBoxFilter = IndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(), distanceBoundingCheck.bottomRight(), mapper);
boundingBoxFilter = IndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(), distanceBoundingCheck.bottomRight(), fieldType);
distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
} else {
throw new IllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported");

@ -30,31 +30,31 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
*/
public class IndexedGeoBoundingBoxQuery {

public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
if (!fieldMapper.fieldType().isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldMapper.name() + "], can't use indexed filter on it");
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
if (!fieldType.isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.names().fullName() + "], can't use indexed filter on it");
}
//checks to see if bounding box crosses 180 degrees
if (topLeft.lon() > bottomRight.lon()) {
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldMapper);
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
} else {
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldMapper);
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
}
}

private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery filter = new BooleanQuery();
filter.setMinimumNumberShouldMatch(1);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD);
filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD);
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
return new ConstantScoreQuery(filter);
}

private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery filter = new BooleanQuery();
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST);
filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
return new ConstantScoreQuery(filter);
}
}
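The bounding-box helper splits into two cases because a box whose top-left longitude is greater than its bottom-right longitude must wrap across the 180-degree meridian; the two longitude halves then become SHOULD clauses instead of a single MUST range. A tiny sketch of just that decision:

    // True when the box wraps across the 180-degree meridian and must be queried as two longitude ranges.
    boolean crossesDateline(double topLeftLon, double bottomRightLon) {
        return topLeftLon > bottomRightLon;
    }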
@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;

@ -99,8 +100,8 @@ public class SimilarityService extends AbstractIndexComponent {

@Override
public Similarity get(String name) {
FieldMapper mapper = mapperService.smartNameFieldMapper(name);
return (mapper != null && mapper.fieldType().similarity() != null) ? mapper.fieldType().similarity().get() : defaultSimilarity;
MappedFieldType fieldType = mapperService.smartNameFieldType(name);
return (fieldType != null && fieldType.similarity() != null) ? fieldType.similarity().get() : defaultSimilarity;
}
}
}
@ -47,6 +47,7 @@ import org.elasticsearch.index.get.GetField;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParseContext;

@ -185,13 +186,13 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY));
}

private boolean isValidField(FieldMapper field) {
private boolean isValidField(MappedFieldType fieldType) {
// must be a string
if (!(field instanceof StringFieldMapper)) {
if (!(fieldType instanceof StringFieldMapper.StringFieldType)) {
return false;
}
// and must be indexed
if (field.fieldType().indexOptions() == IndexOptions.NONE) {
if (fieldType.indexOptions() == IndexOptions.NONE) {
return false;
}
return true;

@ -201,12 +202,12 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
/* only keep valid fields */
Set<String> validFields = new HashSet<>();
for (String field : selectedFields) {
FieldMapper fieldMapper = indexShard.mapperService().smartNameFieldMapper(field);
if (!isValidField(fieldMapper)) {
MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field);
if (!isValidField(fieldType)) {
continue;
}
// already retrieved, only if the analyzer hasn't been overridden at the field
if (fieldMapper.fieldType().storeTermVectors() &&
if (fieldType.storeTermVectors() &&
(request.perFieldAnalyzer() == null || !request.perFieldAnalyzer().containsKey(field))) {
continue;
}

@ -236,7 +237,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString());
} else {
analyzer = mapperService.smartNameFieldMapper(field).fieldType().indexAnalyzer();
analyzer = mapperService.smartNameFieldType(field).indexAnalyzer();
}
if (analyzer == null) {
analyzer = mapperService.analysisService().defaultIndexAnalyzer();

@ -278,17 +279,17 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
Collection<String> seenFields = new HashSet<>();
Collection<GetField> getFields = new HashSet<>();
for (IndexableField field : doc.getFields()) {
FieldMapper fieldMapper = indexShard.mapperService().smartNameFieldMapper(field.name());
MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field.name());
if (seenFields.contains(field.name())) {
continue;
}
else {
seenFields.add(field.name());
}
if (!isValidField(fieldMapper)) {
if (!isValidField(fieldType)) {
continue;
}
if (request.selectedFields() == null && !doAllFields && !fieldMapper.fieldType().storeTermVectors()) {
if (request.selectedFields() == null && !doAllFields && !fieldType.storeTermVectors()) {
continue;
}
if (request.selectedFields() != null && !request.selectedFields().contains(field.name())) {
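Term vectors can only be generated for indexed string fields, which is what the reworked isValidField expresses directly on the field type. A minimal sketch of that predicate, mirroring the hunk above:

    import org.apache.lucene.index.IndexOptions;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.core.StringFieldMapper;

    // A field qualifies for term vectors only if it is a string field and actually indexed.
    boolean isValidField(MappedFieldType fieldType) {
        if (!(fieldType instanceof StringFieldMapper.StringFieldType)) {
            return false; // not a string field
        }
        return fieldType.indexOptions() != IndexOptions.NONE; // must be indexed
    }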
@ -196,7 +196,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLServ

private void purgeShards(List<IndexShard> shardsToPurge) {
for (IndexShard shardToPurge : shardsToPurge) {
Query query = shardToPurge.indexService().mapperService().smartNameFieldMapper(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true, null);
Query query = shardToPurge.indexService().mapperService().smartNameFieldType(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true, null);
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
try {
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());

@ -45,6 +45,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.query.IndexQueryParserService;

@ -651,13 +652,13 @@ public class PercolateContext extends SearchContext {
}

@Override
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, types);
public MappedFieldType smartNameFieldType(String name) {
return mapperService().smartNameFieldType(name, types);
}

@Override
public FieldMapper smartNameFieldMapperFromAnyType(String name) {
return mapperService().smartNameFieldMapper(name);
public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
return mapperService().smartNameFieldType(name);
}

@Override
@ -66,6 +66,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParsedDocument;

@ -752,7 +753,7 @@ public class PercolatorService extends AbstractComponent {
hls = new ArrayList<>(topDocs.scoreDocs.length);
}

final FieldMapper uidMapper = context.mapperService().smartNameFieldMapper(UidFieldMapper.NAME);
final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME);
final IndexFieldData<?> uidFieldData = context.fieldData().getForField(uidMapper);
int i = 0;
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {

@ -35,6 +35,7 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;

@ -73,7 +74,7 @@ abstract class QueryCollector extends SimpleCollector {
this.logger = logger;
this.queries = context.percolateQueries();
this.searcher = context.docSearcher();
final FieldMapper uidMapper = context.mapperService().smartNameFieldMapper(UidFieldMapper.NAME);
final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME);
this.uidFieldData = context.fieldData().getForField(uidMapper);
this.isNestedDoc = isNestedDoc;
@ -32,6 +32,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;

@ -152,21 +153,21 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
throw new ExpressionScriptCompilationException("Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()");
}

FieldMapper field = mapper.smartNameFieldMapper(fieldname);
MappedFieldType fieldType = mapper.smartNameFieldType(fieldname);

if (field == null) {
if (fieldType == null) {
throw new ExpressionScriptCompilationException("Field [" + fieldname + "] used in expression does not exist in mappings");
}
if (field.isNumeric() == false) {
if (fieldType.isNumeric() == false) {
// TODO: more context (which expression?)
throw new ExpressionScriptCompilationException("Field [" + fieldname + "] used in expression must be numeric");
}

IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField((NumberFieldMapper)field);
IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField((NumberFieldMapper.NumberFieldType)fieldType);
if (methodname == null) {
bindings.add(variable, new FieldDataValueSource(fieldData, MultiValueMode.MIN));
} else {
bindings.add(variable, getMethodValueSource(field, fieldData, fieldname, methodname));
bindings.add(variable, getMethodValueSource(fieldType, fieldData, fieldname, methodname));
}
}
}

@ -174,20 +175,20 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
return new ExpressionScript((Expression)compiledScript, bindings, specialValue);
}

protected ValueSource getMethodValueSource(FieldMapper field, IndexFieldData<?> fieldData, String fieldName, String methodName) {
protected ValueSource getMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName) {
switch (methodName) {
case GET_YEAR_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.YEAR);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.YEAR);
case GET_MONTH_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.MONTH);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MONTH);
case GET_DAY_OF_MONTH_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.DAY_OF_MONTH);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.DAY_OF_MONTH);
case GET_HOUR_OF_DAY_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.HOUR_OF_DAY);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.HOUR_OF_DAY);
case GET_MINUTES_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.MINUTE);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MINUTE);
case GET_SECONDS_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.SECOND);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.SECOND);
case MINIMUM_METHOD:
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
case MAXIMUM_METHOD:

@ -205,8 +206,8 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
}
}

protected ValueSource getDateMethodValueSource(FieldMapper field, IndexFieldData<?> fieldData, String fieldName, String methodName, int calendarType) {
if (!(field instanceof DateFieldMapper)) {
protected ValueSource getDateMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName, int calendarType) {
if (!(fieldType instanceof DateFieldMapper.DateFieldType)) {
throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "].");
}
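The expression engine validates a doc['field'].method() reference in two steps: the field must exist and be numeric, and the date accessor methods additionally require a date field type. A minimal sketch of that validation order (exception types simplified; the real code throws ExpressionScriptCompilationException and IllegalArgumentException as shown above):

    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.core.DateFieldMapper;

    // Validate a field referenced from a script expression before binding its field data.
    void validateExpressionField(String fieldname, MappedFieldType fieldType, boolean dateMethodRequested) {
        if (fieldType == null) {
            throw new IllegalArgumentException("Field [" + fieldname + "] used in expression does not exist in mappings");
        }
        if (fieldType.isNumeric() == false) {
            throw new IllegalArgumentException("Field [" + fieldname + "] used in expression must be numeric");
        }
        if (dateMethodRequested && !(fieldType instanceof DateFieldMapper.DateFieldType)) {
            throw new IllegalArgumentException("date member methods can only be used with a date field type");
        }
    }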
@ -59,6 +59,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.TemplateQueryParser;
|
||||
@ -921,7 +922,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
|
||||
@Override
|
||||
public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
|
||||
final MapperService mapperService = indexShard.mapperService();
|
||||
final Map<String, FieldMapper> warmUp = new HashMap<>();
|
||||
final Map<String, MappedFieldType> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
@ -936,26 +937,26 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
if (warmUp.containsKey(indexName)) {
continue;
}
warmUp.put(indexName, fieldMapper);
warmUp.put(indexName, fieldMapper.fieldType());
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
final Executor executor = threadPool.executor(executor());
final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
for (final FieldMapper fieldMapper : warmUp.values()) {
for (final MappedFieldType fieldType : warmUp.values()) {
executor.execute(new Runnable() {

@Override
public void run() {
try {
final long start = System.nanoTime();
indexFieldDataService.getForField(fieldMapper).load(ctx);
indexFieldDataService.getForField(fieldType).load(ctx);
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.fieldType().names().fullName());
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.names().fullName());
} finally {
latch.countDown();
}
@ -975,7 +976,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
@Override
public TerminationHandle warmTopReader(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, FieldMapper> warmUpGlobalOrdinals = new HashMap<>();
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
@ -989,25 +990,25 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
if (warmUpGlobalOrdinals.containsKey(indexName)) {
continue;
}
warmUpGlobalOrdinals.put(indexName, fieldMapper);
warmUpGlobalOrdinals.put(indexName, fieldMapper.fieldType());
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
final Executor executor = threadPool.executor(executor());
final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
for (final FieldMapper fieldMapper : warmUpGlobalOrdinals.values()) {
for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
final long start = System.nanoTime();
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper);
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
ifd.loadGlobal(context.reader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.fieldType().names().fullName());
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldType.names().fullName());
} finally {
latch.countDown();
}
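The two warmer hunks above change what the warm-up map holds (a MappedFieldType instead of a FieldMapper) but keep the same concurrency pattern: de-duplicate fields by index name, submit one task per unit of work, and count a latch down in a finally block so a failed warm-up can never hang the warmer. A minimal standalone sketch of that pattern, using only JDK types; Warmable and warm() are hypothetical stand-ins for MappedFieldType and fielddata loading, not Elasticsearch API.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class WarmerSketch {

    interface Warmable {            // hypothetical stand-in for MappedFieldType
        String indexName();
        void warm(int segment);     // hypothetical stand-in for getForField(fieldType).load(ctx)
    }

    static void warmAll(List<Warmable> fields, int segments) throws InterruptedException {
        // De-duplicate by index name, as in warmUp.put(indexName, fieldMapper.fieldType())
        Map<String, Warmable> warmUp = new HashMap<>();
        for (Warmable field : fields) {
            warmUp.putIfAbsent(field.indexName(), field);
        }
        ExecutorService executor = Executors.newFixedThreadPool(4);
        // One latch count per (segment, field) pair, mirroring leaves().size() * warmUp.size()
        CountDownLatch latch = new CountDownLatch(segments * warmUp.size());
        for (int segment = 0; segment < segments; segment++) {
            final int seg = segment;
            for (final Warmable field : warmUp.values()) {
                executor.execute(() -> {
                    try {
                        field.warm(seg);
                    } catch (Throwable t) {
                        // the real warmer only logs; failures must not block the latch
                    } finally {
                        latch.countDown();
                    }
                });
            }
        }
        latch.await();
        executor.shutdown();
    }
}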
@ -88,8 +88,8 @@ public class ChildrenParser implements Aggregator.Parser {
// TODO: use the query API
parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
childFilter = new QueryWrapperFilter(childDocMapper.typeFilter());
ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper);
config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper));
ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper.fieldType());
config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper.fieldType()));
} else {
config.unmapped(true);
}
@ -28,7 +28,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.index.FilterableTermsEnum;
import org.elasticsearch.common.lucene.index.FreqTermsEnum;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -131,7 +131,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
private final IncludeExclude includeExclude;
private final String executionHint;
private String indexedFieldName;
private FieldMapper mapper;
private MappedFieldType fieldType;
private FilterableTermsEnum termsEnum;
private int numberOfAggregatorsCreated = 0;
private final Query filter;
@ -152,7 +152,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
this.significanceHeuristic = significanceHeuristic;
if (!valueSourceConfig.unmapped()) {
this.indexedFieldName = config.fieldContext().field();
mapper = SearchContext.current().smartNameFieldMapper(indexedFieldName);
fieldType = SearchContext.current().smartNameFieldType(indexedFieldName);
}
this.filter = filter;
}
@ -266,7 +266,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac

public long getBackgroundFrequency(long term) {
BytesRef indexedVal = mapper.indexedValueForSearch(term);
BytesRef indexedVal = fieldType.indexedValueForSearch(term);
return getBackgroundFrequency(indexedVal);
}
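getBackgroundFrequency only needs to translate a user-level term into the representation that was indexed, which is exactly the part of the old FieldMapper API that moved onto MappedFieldType. A hypothetical sketch of that dependency; TermEncoder and encode() are illustrative stand-ins for MappedFieldType.indexedValueForSearch, and the frequency lookup is stubbed with a map rather than a real TermsEnum.

import java.nio.charset.StandardCharsets;
import java.util.Map;

public class BackgroundFrequencySketch {

    // Illustrative stand-in for MappedFieldType.indexedValueForSearch(Object)
    interface TermEncoder {
        byte[] encode(long term);
    }

    private final TermEncoder fieldType;
    private final Map<String, Long> backgroundFrequencies; // stubbed index statistics

    BackgroundFrequencySketch(TermEncoder fieldType, Map<String, Long> backgroundFrequencies) {
        this.fieldType = fieldType;
        this.backgroundFrequencies = backgroundFrequencies;
    }

    long getBackgroundFrequency(long term) {
        // Encode the numeric term the same way it was indexed before looking it up,
        // mirroring fieldType.indexedValueForSearch(term) in the hunk above.
        byte[] indexedValue = fieldType.encode(term);
        String key = new String(indexedValue, StandardCharsets.UTF_8);
        return backgroundFrequencies.getOrDefault(key, 0L);
    }
}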
@ -72,7 +72,7 @@ public class CardinalityParser implements Aggregator.Parser {

ValuesSourceConfig<?> config = vsParser.config();

if (rehash == null && config.fieldContext() != null && config.fieldContext().mapper() instanceof Murmur3FieldMapper) {
if (rehash == null && config.fieldContext() != null && config.fieldContext().fieldType() instanceof Murmur3FieldMapper.Murmur3FieldType) {
rehash = false;
} else if (rehash == null) {
rehash = true;
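The rehash decision now keys off the field type: values of a murmur3-mapped field are already hashes, so the cardinality aggregator can skip hashing them a second time. A standalone sketch of that decision, with hypothetical marker types standing in for MappedFieldType and Murmur3FieldMapper.Murmur3FieldType.

public class RehashDecisionSketch {

    // Hypothetical stand-ins for the field type hierarchy used in the hunk above.
    interface FieldType {}
    static class PreHashedFieldType implements FieldType {}   // analogous to Murmur3FieldType
    static class PlainFieldType implements FieldType {}

    static boolean resolveRehash(Boolean requested, FieldType fieldType) {
        if (requested != null) {
            return requested;                   // an explicit request always wins
        }
        // Pre-hashed fields need no second hashing pass.
        return !(fieldType instanceof PreHashedFieldType);
    }

    public static void main(String[] args) {
        System.out.println(resolveRehash(null, new PreHashedFieldType())); // false
        System.out.println(resolveRehash(null, new PlainFieldType()));     // true
    }
}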
@ -35,7 +35,6 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Date;

/**
 *
@ -102,10 +101,10 @@ public class AggregationContext {
if (config.missing instanceof Number) {
missing = (Number) config.missing;
} else {
if (config.fieldContext != null && config.fieldContext.mapper() instanceof DateFieldMapper) {
final DateFieldMapper mapper = (DateFieldMapper) config.fieldContext.mapper();
if (config.fieldContext != null && config.fieldContext.fieldType() instanceof DateFieldMapper.DateFieldType) {
final DateFieldMapper.DateFieldType fieldType = (DateFieldMapper.DateFieldType) config.fieldContext.fieldType();
try {
missing = mapper.fieldType().dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
missing = fieldType.dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
} catch (IllegalArgumentException e) {
throw new SearchParseException(context, "Expected a date value in [missing] but got [" + config.missing + "]", null, e);
}
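Resolving the [missing] value keeps the same shape after the change: numbers pass through as-is, anything else is parsed with the date format attached to the field type, and a parse failure becomes a user-facing error. A rough standalone sketch, where java.time stands in for the Joda-based dateTimeFormatter() on DateFieldMapper.DateFieldType.

import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;

public class MissingValueSketch {

    static Number resolveMissing(Object missing, DateTimeFormatter dateFormat) {
        if (missing instanceof Number) {
            return (Number) missing;                       // numeric values are used directly
        }
        try {
            // Parse with the field's own date format and convert to epoch millis,
            // mirroring fieldType.dateTimeFormatter().parser().parseDateTime(...).getMillis()
            return LocalDate.parse(missing.toString(), dateFormat)
                    .atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        } catch (DateTimeParseException e) {
            throw new IllegalArgumentException(
                    "Expected a date value in [missing] but got [" + missing + "]", e);
        }
    }

    public static void main(String[] args) {
        DateTimeFormatter format = DateTimeFormatter.ISO_LOCAL_DATE;
        System.out.println(resolveMissing(42, format));            // 42
        System.out.println(resolveMissing("2015-06-01", format));  // epoch millis
    }
}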
@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.support;

import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;

/**
 * Used by all field data based aggregators. This determine the context of the field data the aggregators are operating
@ -29,7 +30,7 @@ public class FieldContext {

private final String field;
private final IndexFieldData<?> indexFieldData;
private final FieldMapper mapper;
private final MappedFieldType fieldType;

/**
 * Constructs a field data context for the given field and its index field data
@ -37,10 +38,10 @@ public class FieldContext {
 * @param field The name of the field
 * @param indexFieldData The index field data of the field
 */
public FieldContext(String field, IndexFieldData<?> indexFieldData, FieldMapper mapper) {
public FieldContext(String field, IndexFieldData<?> indexFieldData, MappedFieldType fieldType) {
this.field = field;
this.indexFieldData = indexFieldData;
this.mapper = mapper;
this.fieldType = fieldType;
}

public String field() {
@ -54,8 +55,8 @@ public class FieldContext {
return indexFieldData;
}

public FieldMapper mapper() {
return mapper;
public MappedFieldType fieldType() {
return fieldType;
}

}
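Consumers of FieldContext now read a MappedFieldType where they previously read a FieldMapper; everything the aggregation layer needs at this point (names, formats, fielddata) is reachable from the field type. A tiny hypothetical usage sketch; ValuesHolder and FieldType below are illustrative stand-ins for IndexFieldData<?> and MappedFieldType.

public class FieldContextSketch {

    interface ValuesHolder {}                        // stand-in for IndexFieldData<?>
    interface FieldType { String fullName(); }       // stand-in for MappedFieldType

    static final class FieldContext {
        private final String field;
        private final ValuesHolder indexFieldData;
        private final FieldType fieldType;

        FieldContext(String field, ValuesHolder indexFieldData, FieldType fieldType) {
            this.field = field;
            this.indexFieldData = indexFieldData;
            this.fieldType = fieldType;
        }

        String field() { return field; }
        ValuesHolder indexFieldData() { return indexFieldData; }
        FieldType fieldType() { return fieldType; }
    }

    // A consumer that previously asked for mapper() now asks for fieldType().
    static String describe(FieldContext context) {
        return context.field() + " -> " + context.fieldType().fullName();
    }

    public static void main(String[] args) {
        FieldContext ctx = new FieldContext("timestamp", new ValuesHolder() {}, () -> "doc.timestamp");
        System.out.println(describe(ctx));
    }
}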
@ -25,6 +25,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@ -169,8 +170,8 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return config;
}

FieldMapper mapper = context.smartNameFieldMapperFromAnyType(input.field);
if (mapper == null) {
MappedFieldType fieldType = context.smartNameFieldTypeFromAnyType(input.field);
if (fieldType == null) {
Class<VS> valuesSourceType = valueType != null ? (Class<VS>) valueType.getValuesSourceType() : this.valuesSourceType;
ValuesSourceConfig<VS> config = new ValuesSourceConfig<>(valuesSourceType);
config.missing = input.missing;
@ -183,7 +184,7 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return config;
}

IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
IndexFieldData<?> indexFieldData = context.fieldData().getForField(fieldType);

ValuesSourceConfig config;
if (valuesSourceType == ValuesSource.class) {
@ -198,10 +199,10 @@ public class ValuesSourceParser<VS extends ValuesSource> {
config = new ValuesSourceConfig(valuesSourceType);
}

config.fieldContext = new FieldContext(input.field, indexFieldData, mapper);
config.fieldContext = new FieldContext(input.field, indexFieldData, fieldType);
config.missing = input.missing;
config.script = createScript();
config.format = resolveFormat(input.format, mapper);
config.format = resolveFormat(input.format, fieldType);
return config;
}

@ -220,17 +221,17 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return valueFormat;
}

private static ValueFormat resolveFormat(@Nullable String format, FieldMapper mapper) {
if (mapper instanceof DateFieldMapper) {
return format != null ? ValueFormat.DateTime.format(format) : ValueFormat.DateTime.mapper((DateFieldMapper) mapper);
private static ValueFormat resolveFormat(@Nullable String format, MappedFieldType fieldType) {
if (fieldType instanceof DateFieldMapper.DateFieldType) {
return format != null ? ValueFormat.DateTime.format(format) : ValueFormat.DateTime.mapper((DateFieldMapper.DateFieldType) fieldType);
}
if (mapper instanceof IpFieldMapper) {
if (fieldType instanceof IpFieldMapper.IpFieldType) {
return ValueFormat.IPv4;
}
if (mapper instanceof BooleanFieldMapper) {
if (fieldType instanceof BooleanFieldMapper.BooleanFieldType) {
return ValueFormat.BOOLEAN;
}
if (mapper instanceof NumberFieldMapper) {
if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return format != null ? ValueFormat.Number.format(format) : ValueFormat.RAW;
}
return null;
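resolveFormat now dispatches on the field type rather than the mapper, but the shape of the decision is unchanged: date fields get a date format (the field's own pattern unless the request supplies one), IP and boolean fields get fixed formats, numeric fields get a numeric format, and everything else gets none. A standalone sketch of that dispatch with hypothetical marker types; the string return values are placeholders for the real ValueFormat objects.

public class ResolveFormatSketch {

    // Hypothetical stand-ins for the MappedFieldType subclasses used in the hunk above.
    interface FieldType {}
    static class DateType implements FieldType { String pattern = "yyyy-MM-dd"; }
    static class IpType implements FieldType {}
    static class BooleanType implements FieldType {}
    static class NumberType implements FieldType {}

    static String resolveFormat(String requested, FieldType fieldType) {
        if (fieldType instanceof DateType) {
            // An explicit format wins; otherwise fall back to the field's own date pattern.
            return requested != null ? "date(" + requested + ")" : "date(" + ((DateType) fieldType).pattern + ")";
        }
        if (fieldType instanceof IpType) {
            return "ipv4";
        }
        if (fieldType instanceof BooleanType) {
            return "boolean";
        }
        if (fieldType instanceof NumberType) {
            return requested != null ? "number(" + requested + ")" : "raw";
        }
        return null; // unformatted
    }

    public static void main(String[] args) {
        System.out.println(resolveFormat(null, new DateType()));      // date(yyyy-MM-dd)
        System.out.println(resolveFormat("0.0", new NumberType()));   // number(0.0)
        System.out.println(resolveFormat(null, new IpType()));        // ipv4
    }
}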
@ -71,8 +71,8 @@ public class ValueFormat {
return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format));
}

public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.fieldType().dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper));
public static DateTime mapper(DateFieldMapper.DateFieldType fieldType) {
return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType), ValueParser.DateMath.mapper(fieldType));
}

public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {
@ -104,8 +104,8 @@ public interface ValueFormatter extends Streamable {
public static final ValueFormatter DEFAULT = new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER);
private DateTimeZone timeZone = DateTimeZone.UTC;

public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.fieldType().dateTimeFormatter());
public static DateTime mapper(DateFieldMapper.DateFieldType fieldType) {
return new DateTime(fieldType.dateTimeFormatter());
}

static final byte ID = 2;
@ -108,8 +108,8 @@ public interface ValueParser {
return parseLong(value, searchContext);
}

public static DateMath mapper(DateFieldMapper mapper) {
return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter()));
public static DateMath mapper(DateFieldMapper.DateFieldType fieldType) {
return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()));
}
}

@ -45,7 +45,7 @@ import org.elasticsearch.index.fieldvisitor.JustUidFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.UidAndSourceFieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.search.SearchHit;
@ -142,17 +142,17 @@ public class FetchPhase implements SearchPhase {
}
continue;
}
FieldMapper mapper = context.smartNameFieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = context.smartNameFieldType(fieldName);
if (fieldType == null) {
// Only fail if we know it is a object field, missing paths / fields shouldn't fail.
if (context.smartNameObjectMapper(fieldName) != null) {
throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
}
} else if (mapper.fieldType().stored()) {
} else if (fieldType.stored()) {
if (fieldNames == null) {
fieldNames = new HashSet<>();
}
fieldNames.add(mapper.fieldType().names().indexName());
fieldNames.add(fieldType.names().indexName());
} else {
if (extractFieldNames == null) {
extractFieldNames = newArrayList();
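The FetchPhase hunk keeps its routing logic and simply asks the field type directly: an unresolved name is an error only if it refers to an object field, stored fields are fetched by their index name, and everything else falls back to _source extraction. A compact standalone sketch of that routing; FieldLookup and FieldType are hypothetical stand-ins for the search context lookups and MappedFieldType.

import java.util.List;
import java.util.Set;

public class FetchFieldRoutingSketch {

    // Hypothetical stand-in for MappedFieldType: only the two properties the routing needs.
    static final class FieldType {
        final String indexName;
        final boolean stored;
        FieldType(String indexName, boolean stored) { this.indexName = indexName; this.stored = stored; }
    }

    interface FieldLookup {                       // stand-in for context.smartNameFieldType(...)
        FieldType fieldType(String name);
        boolean isObjectField(String name);       // stand-in for smartNameObjectMapper(...) != null
    }

    static void route(String fieldName, FieldLookup lookup,
                      Set<String> storedFieldNames, List<String> extractFromSource) {
        FieldType fieldType = lookup.fieldType(fieldName);
        if (fieldType == null) {
            // Only fail when the name is known to be an object field; missing fields are fine.
            if (lookup.isObjectField(fieldName)) {
                throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
            }
        } else if (fieldType.stored) {
            storedFieldNames.add(fieldType.indexName);   // fetch from stored fields
        } else {
            extractFromSource.add(fieldName);            // fall back to _source extraction
        }
    }
}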
@ -19,11 +19,11 @@
package org.elasticsearch.search.fetch.fielddata;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchSubPhase;
@ -80,9 +80,9 @@ public class FieldDataFieldsFetchSubPhase implements FetchSubPhase {
hitField = new InternalSearchHitField(field.name(), new ArrayList<>(2));
hitContext.hit().fields().put(field.name(), hitField);
}
FieldMapper mapper = context.mapperService().smartNameFieldMapper(field.name());
if (mapper != null) {
AtomicFieldData data = context.fieldData().getForField(mapper).load(hitContext.readerContext());
MappedFieldType fieldType = context.mapperService().smartNameFieldType(field.name());
if (fieldType != null) {
AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext());
ScriptDocValues values = data.getScriptValues();
values.setNextDocId(hitContext.docId());
hitField.values().addAll(values.getValues());
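This fetch sub-phase resolves a MappedFieldType per requested field, loads its fielddata for the hit's segment, positions the doc-values view on the hit's doc id, and copies the values into the hit. A rough standalone sketch of that flow; the interfaces below are hypothetical stand-ins for MappedFieldType, AtomicFieldData and ScriptDocValues, not the real API.

import java.util.ArrayList;
import java.util.List;

public class FieldDataFieldsSketch {

    interface FieldTypeLookup { Object fieldType(String field); }                    // smartNameFieldType(...)
    interface FieldDataSource { SegmentValues load(Object fieldType, int segment); } // getForField(...).load(ctx)
    interface SegmentValues { List<Object> valuesFor(int docId); }                   // doc values positioned on a doc

    static List<Object> fetch(String field, int segment, int docId,
                              FieldTypeLookup lookup, FieldDataSource fieldData) {
        List<Object> hitValues = new ArrayList<>(2);
        Object fieldType = lookup.fieldType(field);
        if (fieldType != null) {                        // unmapped fields yield an empty hit field
            SegmentValues values = fieldData.load(fieldType, segment);
            hitValues.addAll(values.valuesFor(docId));
        }
        return hitValues;
    }
}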
@ -50,7 +50,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
@ -713,13 +713,13 @@ public class DefaultSearchContext extends SearchContext {
}

@Override
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, request.types());
public MappedFieldType smartNameFieldType(String name) {
return mapperService().smartNameFieldType(name, request.types());
}

@Override
public FieldMapper smartNameFieldMapperFromAnyType(String name) {
return mapperService().smartNameFieldMapper(name);
public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
return mapperService().smartNameFieldType(name);
}

@Override
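DefaultSearchContext keeps the same two lookup flavours, one restricted to the types named in the request and one across all mapped types; only the return type changes to MappedFieldType. A minimal sketch of that delegation, with MapperServiceLike as a hypothetical stand-in for MapperService.

public class SearchContextLookupSketch {

    interface MapperServiceLike {                         // hypothetical stand-in for MapperService
        Object smartNameFieldType(String name, String... types);
        Object smartNameFieldType(String name);
    }

    private final MapperServiceLike mapperService;
    private final String[] requestTypes;

    SearchContextLookupSketch(MapperServiceLike mapperService, String[] requestTypes) {
        this.mapperService = mapperService;
        this.requestTypes = requestTypes;
    }

    // Restricted to the types named in the search request.
    Object smartNameFieldType(String name) {
        return mapperService.smartNameFieldType(name, requestTypes);
    }

    // Unrestricted lookup across all mapped types.
    Object smartNameFieldTypeFromAnyType(String name) {
        return mapperService.smartNameFieldType(name);
    }
}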