Mappings: Make index level mapping apis use MappedFieldType

The MapperService is the "index wide view" of mappings. Methods on it
are used at query time to lookup how to query a field. This
change reduces the exposed api so that any information returned
is limited to that api exposed by MappedFieldType. In the future,
MappedFieldType will be guaranteed to be the same across all
document types for a given field.

Note CompletionFieldType needed some more settings moved to it. Other
than that, this change is almost purely cosmetic.
This commit is contained in:
Ryan Ernst 2015-06-09 09:56:39 -07:00
parent 9787266b60
commit 1fdae75025
104 changed files with 765 additions and 693 deletions

View File

@ -19,15 +19,12 @@
package org.apache.lucene.queries;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.FieldMapper;
import java.io.IOException;
import org.elasticsearch.index.mapper.MappedFieldType;
/**
* Extended version of {@link CommonTermsQuery} that allows to pass in a
@ -36,11 +33,11 @@ import java.io.IOException;
*/
public class ExtendedCommonTermsQuery extends CommonTermsQuery {
private final FieldMapper mapper;
private final MappedFieldType fieldType;
public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, boolean disableCoord, FieldMapper mapper) {
public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, boolean disableCoord, MappedFieldType fieldType) {
super(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord);
this.mapper = mapper;
this.fieldType = fieldType;
}
private String lowFreqMinNumShouldMatchSpec;
@ -81,10 +78,10 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery {
@Override
protected Query newTermQuery(Term term, TermContext context) {
if (mapper == null) {
if (fieldType == null) {
return super.newTermQuery(term, context);
}
final Query query = mapper.queryStringTermQuery(term);
final Query query = fieldType.queryStringTermQuery(term);
if (query == null) {
return super.newTermQuery(term, context);
} else {

View File

@ -21,7 +21,6 @@ package org.apache.lucene.queryparser.classic;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -37,7 +36,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
@ -77,7 +76,7 @@ public class MapperQueryParser extends QueryParser {
private boolean forcedAnalyzer;
private boolean forcedQuoteAnalyzer;
private FieldMapper currentMapper;
private MappedFieldType currentFieldType;
private boolean analyzeWildcard;
@ -148,8 +147,8 @@ public class MapperQueryParser extends QueryParser {
@Override
protected Query newTermQuery(Term term) {
if (currentMapper != null) {
Query termQuery = currentMapper.queryStringTermQuery(term);
if (currentFieldType != null) {
Query termQuery = currentFieldType.queryStringTermQuery(term);
if (termQuery != null) {
return termQuery;
}
@ -224,33 +223,33 @@ public class MapperQueryParser extends QueryParser {
return getRangeQuerySingle(field, null, queryText.substring(1), true, false);
}
}
currentMapper = null;
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
if (quoted) {
setAnalyzer(quoteAnalyzer);
if (quoteFieldSuffix != null) {
currentMapper = parseContext.fieldMapper(field + quoteFieldSuffix);
currentFieldType = parseContext.fieldMapper(field + quoteFieldSuffix);
}
}
if (currentMapper == null) {
currentMapper = parseContext.fieldMapper(field);
if (currentFieldType == null) {
currentFieldType = parseContext.fieldMapper(field);
}
if (currentMapper != null) {
if (currentFieldType != null) {
if (quoted) {
if (!forcedQuoteAnalyzer) {
setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentMapper));
setAnalyzer(parseContext.getSearchQuoteAnalyzer(currentFieldType));
}
} else {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
}
if (currentMapper != null) {
if (currentFieldType != null) {
Query query = null;
if (currentMapper.useTermQueryWithQueryString()) {
if (currentFieldType.useTermQueryWithQueryString()) {
try {
query = currentMapper.termQuery(queryText, parseContext);
query = currentFieldType.termQuery(queryText, parseContext);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
@ -260,7 +259,7 @@ public class MapperQueryParser extends QueryParser {
}
}
if (query == null) {
query = super.getFieldQuery(currentMapper.fieldType().names().indexName(), queryText, quoted);
query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted);
}
return query;
}
@ -361,20 +360,20 @@ public class MapperQueryParser extends QueryParser {
}
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
currentMapper = parseContext.fieldMapper(field);
if (currentMapper != null) {
if (lowercaseExpandedTerms && !currentMapper.isNumeric()) {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
part1 = part1 == null ? null : part1.toLowerCase(locale);
part2 = part2 == null ? null : part2.toLowerCase(locale);
}
try {
Query rangeQuery;
if (currentMapper instanceof DateFieldMapper && settings.timeZone() != null) {
DateFieldMapper dateFieldMapper = (DateFieldMapper) this.currentMapper;
rangeQuery = dateFieldMapper.fieldType().rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext);
if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext);
} else {
rangeQuery = currentMapper.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext);
rangeQuery = currentFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext);
}
return rangeQuery;
} catch (RuntimeException e) {
@ -426,11 +425,11 @@ public class MapperQueryParser extends QueryParser {
}
private Query getFuzzyQuerySingle(String field, String termStr, String minSimilarity) throws ParseException {
currentMapper = parseContext.fieldMapper(field);
if (currentMapper!= null) {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
try {
//LUCENE 4 UPGRADE I disabled transpositions here by default - maybe this needs to be changed
return currentMapper.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), false);
return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), false);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
@ -495,20 +494,20 @@ public class MapperQueryParser extends QueryParser {
}
private Query getPrefixQuerySingle(String field, String termStr) throws ParseException {
currentMapper = null;
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentMapper = parseContext.fieldMapper(field);
if (currentMapper != null) {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentMapper.useTermQueryWithQueryString()) {
query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, parseContext);
}
if (query == null) {
query = getPossiblyAnalyzedPrefixQuery(currentMapper.fieldType().names().indexName(), termStr);
query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
}
return query;
}
@ -636,15 +635,15 @@ public class MapperQueryParser extends QueryParser {
private Query getWildcardQuerySingle(String field, String termStr) throws ParseException {
String indexedNameField = field;
currentMapper = null;
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentMapper = parseContext.fieldMapper(field);
if (currentMapper != null) {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
indexedNameField = currentMapper.fieldType().names().indexName();
indexedNameField = currentFieldType.names().indexName();
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
}
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
@ -768,17 +767,17 @@ public class MapperQueryParser extends QueryParser {
}
private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
currentMapper = null;
currentFieldType = null;
Analyzer oldAnalyzer = getAnalyzer();
try {
currentMapper = parseContext.fieldMapper(field);
if (currentMapper != null) {
currentFieldType = parseContext.fieldMapper(field);
if (currentFieldType != null) {
if (!forcedAnalyzer) {
setAnalyzer(parseContext.getSearchAnalyzer(currentMapper));
setAnalyzer(parseContext.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentMapper.useTermQueryWithQueryString()) {
query = currentMapper.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, parseContext);
}
if (query == null) {
query = super.getRegexpQuery(field, termStr);

View File

@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.*;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.ShardId;
@ -108,13 +109,13 @@ public class TransportAnalyzeAction extends TransportSingleCustomOperationAction
if (indexService == null) {
throw new IllegalArgumentException("No index provided, and trying to analyzer based on a specific field which requires the index parameter");
}
FieldMapper fieldMapper = indexService.mapperService().smartNameFieldMapper(request.field());
if (fieldMapper != null) {
if (fieldMapper.isNumeric()) {
MappedFieldType fieldType = indexService.mapperService().smartNameFieldType(request.field());
if (fieldType != null) {
if (fieldType.isNumeric()) {
throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
}
analyzer = fieldMapper.fieldType().indexAnalyzer();
field = fieldMapper.fieldType().names().indexName();
analyzer = fieldType.indexAnalyzer();
field = fieldType.names().indexName();
}
}

View File

@ -39,6 +39,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
@ -135,12 +136,12 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
shard.readAllowed();
try (Engine.Searcher searcher = shard.acquireSearcher("fieldstats")) {
for (String field : request.getFields()) {
FieldMapper fieldMapper = mapperService.fullName(field);
if (fieldMapper != null) {
MappedFieldType fieldType = mapperService.fullName(field);
if (fieldType != null) {
IndexReader reader = searcher.reader();
Terms terms = MultiFields.getTerms(reader, field);
if (terms != null) {
fieldStats.put(field, fieldMapper.stats(terms, reader.maxDoc()));
fieldStats.put(field, fieldType.stats(terms, reader.maxDoc()));
}
} else {
throw new IllegalArgumentException("field [" + field + "] doesn't exist");

View File

@ -26,6 +26,7 @@ import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
@ -54,14 +55,14 @@ public class PerFieldMappingPostingFormatCodec extends Lucene50Codec {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
final FieldMapper indexName = mapperService.indexName(field);
final MappedFieldType indexName = mapperService.indexName(field);
if (indexName == null) {
logger.warn("no index mapper found for field: [{}] returning default postings format", field);
} else if (indexName instanceof CompletionFieldMapper) {
} else if (indexName instanceof CompletionFieldMapper.CompletionFieldType) {
// CompletionFieldMapper needs a special postings format
final CompletionFieldMapper mapper = (CompletionFieldMapper) indexName;
final CompletionFieldMapper.CompletionFieldType fieldType = (CompletionFieldMapper.CompletionFieldType) indexName;
final PostingsFormat defaultFormat = super.getPostingsFormatForField(field);
return mapper.postingsFormat(defaultFormat);
return fieldType.postingsFormat(defaultFormat);
}
return super.getPostingsFormatForField(field);
}

View File

@ -229,7 +229,7 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
interface Builder {
IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService);
}

View File

@ -228,13 +228,13 @@ public class IndexFieldDataService extends AbstractIndexComponent {
}
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper mapper) {
final Names fieldNames = mapper.fieldType().names();
final FieldDataType type = mapper.fieldType().fieldDataType();
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
final Names fieldNames = fieldType.names();
final FieldDataType type = fieldType.fieldDataType();
if (type == null) {
throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
}
final boolean docValues = mapper.fieldType().hasDocValues();
final boolean docValues = fieldType.hasDocValues();
final String key = fieldNames.indexName();
IndexFieldData<?> fieldData = loadedFieldData.get(key);
if (fieldData == null) {
@ -279,7 +279,7 @@ public class IndexFieldDataService extends AbstractIndexComponent {
fieldDataCaches.put(fieldNames.indexName(), cache);
}
fieldData = builder.build(index, indexSettings, mapper, cache, circuitBreakerService, indexService.mapperService());
fieldData = builder.build(index, indexSettings, fieldType, cache, circuitBreakerService, indexService.mapperService());
loadedFieldData.put(fieldNames.indexName(), fieldData);
}
} finally {

View File

@ -29,6 +29,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -64,11 +65,11 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
final Names fieldNames = mapper.fieldType().names();
return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
final Names fieldNames = fieldType.names();
return new BytesBinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
}
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
@ -39,10 +40,10 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicF
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<AtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
public IndexFieldData<AtomicFieldData> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
return new DisabledIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache);
return new DisabledIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache);
}
}

View File

@ -91,11 +91,11 @@ public abstract class DocValuesIndexFieldData {
}
@Override
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
final Names fieldNames = mapper.fieldType().names();
final Settings fdSettings = mapper.fieldType().fieldDataType().getSettings();
final Names fieldNames = fieldType.names();
final Settings fdSettings = fieldType.fieldDataType().getSettings();
final Map<String, Settings> filter = fdSettings.getGroups("filter");
if (filter != null && !filter.isEmpty()) {
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]");
@ -103,19 +103,19 @@ public abstract class DocValuesIndexFieldData {
if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
assert numericType == null;
return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
return new BinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
} else if (NUMERIC_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
assert !numericType.isFloatingPoint();
return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
return new NumericDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
} else if (numericType != null) {
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1)) {
return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType());
return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
} else {
// prior to ES 1.4: multi-valued numerics were boxed inside a byte[] as BINARY
return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType());
return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, fieldType.fieldDataType());
}
} else {
return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldType().fieldDataType());
return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, fieldType.fieldDataType());
}
}

View File

@ -73,9 +73,9 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new DoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
return new DoubleArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
}
}

View File

@ -47,9 +47,9 @@ public class FSTBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new FSTBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
return new FSTBytesIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
}
}

View File

@ -72,9 +72,9 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<AtomicNumer
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new FloatArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
return new FloatArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
}
}

View File

@ -63,11 +63,11 @@ public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData impl
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
final Names fieldNames = mapper.fieldType().names();
return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType());
final Names fieldNames = fieldType.names();
return new GeoPointBinaryDVIndexFieldData(index, fieldNames, fieldType.fieldDataType());
}
}

View File

@ -53,9 +53,9 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
FieldDataType type = mapper.fieldType().fieldDataType();
FieldDataType type = fieldType.fieldDataType();
final String precisionAsString = type.getSettings().get(PRECISION_KEY);
final Distance precision;
if (precisionAsString != null) {
@ -63,7 +63,7 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField
} else {
precision = DEFAULT_PRECISION_VALUE;
}
return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, precision, breakerService);
return new GeoPointCompressedIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, precision, breakerService);
}
}

View File

@ -47,9 +47,9 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
}
}

View File

@ -46,9 +46,9 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache,
public IndexFieldData<?> build(Index index, Settings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new IndexIndexFieldData(index, mapper.fieldType().names());
return new IndexIndexFieldData(index, fieldType.names());
}
}

View File

@ -85,9 +85,9 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
}
@Override
public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
public IndexFieldData<AtomicNumericFieldData> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new PackedArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, numericType, breakerService);
return new PackedArrayIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, numericType, breakerService);
}
}

View File

@ -48,9 +48,9 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new PagedBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService);
return new PagedBytesIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
}
}

View File

@ -250,10 +250,10 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper,
public IndexFieldData<?> build(Index index, @IndexSettings Settings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService,
MapperService mapperService) {
return new ParentChildIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache,
return new ParentChildIndexFieldData(index, indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
mapperService, breakerService);
}
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
@ -61,13 +62,13 @@ public abstract class FieldsVisitor extends StoredFieldVisitor {
}
// can't derive exact mapping type
for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
FieldMapper fieldMappers = mapperService.indexName(entry.getKey());
if (fieldMappers == null) {
MappedFieldType fieldType = mapperService.indexName(entry.getKey());
if (fieldType == null) {
continue;
}
List<Object> fieldValues = entry.getValue();
for (int i = 0; i < fieldValues.size(); i++) {
fieldValues.set(i, fieldMappers.valueForSearch(fieldValues.get(i)));
fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
}
}
}

View File

@ -20,6 +20,7 @@ package org.elasticsearch.index.fieldvisitor;
import org.apache.lucene.index.FieldInfo;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@ -55,7 +56,7 @@ public class SingleFieldsVisitor extends FieldsVisitor {
super.reset();
}
public void postProcess(FieldMapper mapper) {
public void postProcess(MappedFieldType fieldType) {
if (uid != null) {
switch (field) {
case UidFieldMapper.NAME: addValue(field, uid.toString());
@ -67,12 +68,12 @@ public class SingleFieldsVisitor extends FieldsVisitor {
if (fieldsValues == null) {
return;
}
List<Object> fieldValues = fieldsValues.get(mapper.fieldType().names().indexName());
List<Object> fieldValues = fieldsValues.get(fieldType.names().indexName());
if (fieldValues == null) {
return;
}
for (int i = 0; i < fieldValues.size(); i++) {
fieldValues.set(i, mapper.valueForSearch(fieldValues.get(i)));
fieldValues.set(i, fieldType.valueForSearch(fieldValues.get(i)));
}
}
}

View File

@ -113,7 +113,7 @@ public class MapperService extends AbstractIndexComponent {
private final List<DocumentTypeListener> typeListeners = new CopyOnWriteArrayList<>();
private volatile ImmutableMap<String, FieldMapper> unmappedFieldMappers = ImmutableMap.of();
private volatile ImmutableMap<String, MappedFieldType> unmappedFieldTypes = ImmutableMap.of();
private volatile ImmutableSet<String> parentTypes = ImmutableSet.of();
@ -474,31 +474,29 @@ public class MapperService extends AbstractIndexComponent {
}
/**
* Returns an {@link FieldMapper} which has the given index name.
* Returns an {@link MappedFieldType} which has the given index name.
*
* If multiple types have fields with the same index name, the first is returned.
*/
public FieldMapper indexName(String indexName) {
public MappedFieldType indexName(String indexName) {
FieldMappers mappers = fieldMappers.indexName(indexName);
if (mappers == null) {
return null;
}
return mappers.mapper();
return mappers.mapper().fieldType();
}
/**
* Returns the {@link FieldMappers} of all the {@link FieldMapper}s that are
* registered under the give fullName across all the different {@link DocumentMapper} types.
* Returns the {@link MappedFieldType} for the give fullName.
*
* @param fullName The full name
* @return All teh {@link FieldMappers} across all the {@link DocumentMapper}s for the given fullName.
* If multiple types have fields with the same full name, the first is returned.
*/
public FieldMapper fullName(String fullName) {
public MappedFieldType fullName(String fullName) {
FieldMappers mappers = fieldMappers.fullName(fullName);
if (mappers == null) {
return null;
}
return mappers.mapper();
return mappers.mapper().fieldType();
}
/**
@ -563,17 +561,17 @@ public class MapperService extends AbstractIndexComponent {
return null;
}
public FieldMapper smartNameFieldMapper(String smartName) {
FieldMapper mapper = fullName(smartName);
if (mapper != null) {
return mapper;
public MappedFieldType smartNameFieldType(String smartName) {
MappedFieldType fieldType = fullName(smartName);
if (fieldType != null) {
return fieldType;
}
return indexName(smartName);
}
public FieldMapper smartNameFieldMapper(String smartName, @Nullable String[] types) {
public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) {
if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
return smartNameFieldMapper(smartName);
return smartNameFieldType(smartName);
}
for (String type : types) {
DocumentMapper documentMapper = mappers.get(type);
@ -582,7 +580,7 @@ public class MapperService extends AbstractIndexComponent {
// see if we find a field for it
FieldMappers mappers = documentMapper.mappers().smartName(smartName);
if (mappers != null) {
return mappers.mapper();
return mappers.mapper().fieldType();
}
}
}
@ -592,10 +590,10 @@ public class MapperService extends AbstractIndexComponent {
/**
* Given a type (eg. long, string, ...), return an anonymous field mapper that can be used for search operations.
*/
public FieldMapper unmappedFieldMapper(String type) {
final ImmutableMap<String, FieldMapper> unmappedFieldMappers = this.unmappedFieldMappers;
FieldMapper mapper = unmappedFieldMappers.get(type);
if (mapper == null) {
public MappedFieldType unmappedFieldType(String type) {
final ImmutableMap<String, MappedFieldType> unmappedFieldMappers = this.unmappedFieldTypes;
MappedFieldType fieldType = unmappedFieldMappers.get(type);
if (fieldType == null) {
final Mapper.TypeParser.ParserContext parserContext = documentMapperParser().parserContext();
Mapper.TypeParser typeParser = parserContext.typeParser(type);
if (typeParser == null) {
@ -603,16 +601,16 @@ public class MapperService extends AbstractIndexComponent {
}
final Mapper.Builder<?, ?> builder = typeParser.parse("__anonymous_" + type, ImmutableMap.<String, Object>of(), parserContext);
final BuilderContext builderContext = new BuilderContext(indexSettings, new ContentPath(1));
mapper = (FieldMapper) builder.build(builderContext);
fieldType = ((FieldMapper)builder.build(builderContext)).fieldType();
// There is no need to synchronize writes here. In the case of concurrent access, we could just
// compute some mappers several times, which is not a big deal
this.unmappedFieldMappers = ImmutableMap.<String, FieldMapper>builder()
this.unmappedFieldTypes = ImmutableMap.<String, MappedFieldType>builder()
.putAll(unmappedFieldMappers)
.put(type, mapper)
.put(type, fieldType)
.build();
}
return mapper;
return fieldType;
}
public Analyzer searchAnalyzer() {
@ -702,9 +700,9 @@ public class MapperService extends AbstractIndexComponent {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
FieldMapper mapper = smartNameFieldMapper(fieldName);
if (mapper != null && mapper.fieldType().searchAnalyzer() != null) {
return mapper.fieldType().searchAnalyzer();
MappedFieldType fieldType = smartNameFieldType(fieldName);
if (fieldType != null && fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer();
}
return defaultAnalyzer;
}
@ -721,9 +719,9 @@ public class MapperService extends AbstractIndexComponent {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
FieldMapper mapper = smartNameFieldMapper(fieldName);
if (mapper != null && mapper.fieldType().searchQuoteAnalyzer() != null) {
return mapper.fieldType().searchQuoteAnalyzer();
MappedFieldType fieldType = smartNameFieldType(fieldName);
if (fieldType != null && fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer();
}
return defaultAnalyzer;
}

View File

@ -116,7 +116,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
}
}
static final class BooleanFieldType extends MappedFieldType {
public static final class BooleanFieldType extends MappedFieldType {
public BooleanFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);

View File

@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat;
import org.elasticsearch.search.suggest.completion.CompletionTokenStream;
@ -72,7 +71,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
public static final String CONTENT_TYPE = "completion";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final MappedFieldType FIELD_TYPE = new CompletionFieldType();
public static final CompletionFieldType FIELD_TYPE = new CompletionFieldType();
static {
FIELD_TYPE.setOmitNorms(true);
@ -149,8 +148,10 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
@Override
public CompletionFieldMapper build(Mapper.BuilderContext context) {
setupFieldType(context);
return new CompletionFieldMapper(fieldType, null, payloads,
preserveSeparators, preservePositionIncrements, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, this.contextMapping);
CompletionFieldType completionFieldType = (CompletionFieldType)fieldType;
completionFieldType.setProvider(new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads));
completionFieldType.setContextMapping(contextMapping);
return new CompletionFieldMapper(fieldType, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
@ -220,7 +221,10 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}
static final class CompletionFieldType extends MappedFieldType {
public static final class CompletionFieldType extends MappedFieldType {
private PostingsFormat postingsFormat;
private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;
public CompletionFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
@ -228,13 +232,46 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
protected CompletionFieldType(CompletionFieldType ref) {
super(ref);
this.postingsFormat = ref.postingsFormat;
this.analyzingSuggestLookupProvider = ref.analyzingSuggestLookupProvider;
this.contextMapping = ref.contextMapping;
}
@Override
public MappedFieldType clone() {
public CompletionFieldType clone() {
return new CompletionFieldType(this);
}
public void setProvider(AnalyzingCompletionLookupProvider provider) {
checkIfFrozen();
this.analyzingSuggestLookupProvider = provider;
}
public synchronized PostingsFormat postingsFormat(PostingsFormat in) {
if (in instanceof Completion090PostingsFormat) {
throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class);
}
if (postingsFormat == null) {
postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider);
}
return postingsFormat;
}
public void setContextMapping(SortedMap<String, ContextMapping> contextMapping) {
checkIfFrozen();
this.contextMapping = contextMapping;
}
/** Get the context mapping associated with this completion field */
public SortedMap<String, ContextMapping> getContextMapping() {
return contextMapping;
}
/** @return true if a context mapping has been defined */
public boolean requiresContext() {
return contextMapping.isEmpty() == false;
}
@Override
public String value(Object value) {
if (value == null) {
@ -251,46 +288,16 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
private static final BytesRef EMPTY = new BytesRef();
private PostingsFormat postingsFormat;
private final AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
private final boolean payloads;
private final boolean preservePositionIncrements;
private final boolean preserveSeparators;
private int maxInputLength;
private final SortedMap<String, ContextMapping> contextMapping;
/**
*
* @param contextMappings Configuration of context type. If none should be used set {@link ContextMapping.EMPTY_MAPPING}
* @param wrappedPostingsFormat the postings format to wrap, or {@code null} to wrap the codec's default postings format
*/
// Custom postings formats are deprecated but we still accept a postings format here to be able to test backward compatibility
// with older postings formats such as Elasticsearch090
public CompletionFieldMapper(MappedFieldType fieldType, PostingsFormat wrappedPostingsFormat, boolean payloads,
boolean preserveSeparators, boolean preservePositionIncrements, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, SortedMap<String, ContextMapping> contextMappings) {
public CompletionFieldMapper(MappedFieldType fieldType, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(fieldType, false, null, indexSettings, multiFields, copyTo);
analyzingSuggestLookupProvider = new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads);
if (wrappedPostingsFormat == null) {
// delayed until postingsFormat() is called
this.postingsFormat = null;
} else {
this.postingsFormat = new Completion090PostingsFormat(wrappedPostingsFormat, analyzingSuggestLookupProvider);
}
this.preserveSeparators = preserveSeparators;
this.payloads = payloads;
this.preservePositionIncrements = preservePositionIncrements;
this.maxInputLength = maxInputLength;
this.contextMapping = contextMappings;
}
public synchronized PostingsFormat postingsFormat(PostingsFormat in) {
if (in instanceof Completion090PostingsFormat) {
throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class);
}
if (postingsFormat == null) {
postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider);
}
return postingsFormat;
@Override
public CompletionFieldType fieldType() {
return (CompletionFieldType)fieldType;
}
@Override
@ -325,7 +332,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if (token == Token.START_OBJECT) {
while ((token = parser.nextToken()) != Token.END_OBJECT) {
String name = parser.text();
ContextMapping mapping = contextMapping.get(name);
ContextMapping mapping = fieldType().getContextMapping().get(name);
if (mapping == null) {
throw new ElasticsearchParseException("context [" + name + "] is not defined");
} else {
@ -334,7 +341,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}
contextConfig = Maps.newTreeMap();
for (ContextMapping mapping : contextMapping.values()) {
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
ContextConfig config = configs.get(mapping.name());
contextConfig.put(mapping.name(), config==null ? mapping.defaultConfig() : config);
}
@ -392,7 +399,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if(contextConfig == null) {
contextConfig = Maps.newTreeMap();
for (ContextMapping mapping : contextMapping.values()) {
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
contextConfig.put(mapping.name(), mapping.defaultConfig());
}
}
@ -405,13 +412,13 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if (input.length() == 0) {
continue;
}
BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
input), weight, payload);
BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef(
input), weight, payload);
context.doc().add(getCompletionField(ctx, input, suggestPayload));
}
} else {
BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
surfaceForm), weight, payload);
BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef(
surfaceForm), weight, payload);
for (String input : inputs) {
if (input.length() == 0) {
continue;
@ -428,22 +435,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
}
/**
* Get the context mapping associated with this completion field.
*/
public SortedMap<String, ContextMapping> getContextMapping() {
return contextMapping;
}
/** @return true if a context mapping has been defined */
public boolean requiresContext() {
return !contextMapping.isEmpty();
}
public Field getCompletionField(String input, BytesRef payload) {
return getCompletionField(ContextMapping.EMPTY_CONTEXT, input, payload);
}
public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) {
final String originalInput = input;
if (input.length() > maxInputLength) {
@ -457,7 +448,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
+ "] at position " + i + " is a reserved character");
}
}
return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider);
return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, fieldType().analyzingSuggestLookupProvider);
}
public static int correctSubStringLen(String input, int len) {
@ -469,8 +460,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException {
return analyzingSuggestLookupProvider.buildPayload(
surfaceForm, weight, payload);
return fieldType().analyzingSuggestLookupProvider.buildPayload(surfaceForm, weight, payload);
}
private static final class SuggestField extends Field {
@ -501,15 +491,15 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
if (fieldType.indexAnalyzer().name().equals(fieldType.searchAnalyzer().name()) == false) {
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType.searchAnalyzer().name());
}
builder.field(Fields.PAYLOADS, this.payloads);
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators);
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), this.preservePositionIncrements);
builder.field(Fields.PAYLOADS, fieldType().analyzingSuggestLookupProvider.hasPayloads());
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreserveSep());
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements());
builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength);
multiFields.toXContent(builder, params);
if(!contextMapping.isEmpty()) {
if(fieldType().requiresContext()) {
builder.startObject(Fields.CONTEXT);
for (ContextMapping mapping : contextMapping.values()) {
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
builder.value(mapping);
}
builder.endObject();
@ -538,23 +528,23 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
public boolean isStoringPayloads() {
return payloads;
return fieldType().analyzingSuggestLookupProvider.hasPayloads();
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
if (payloads != fieldMergeWith.payloads) {
if (fieldType().analyzingSuggestLookupProvider.hasPayloads() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.hasPayloads()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different payload values");
}
if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) {
if (fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_position_increments' values");
}
if (preserveSeparators != fieldMergeWith.preserveSeparators) {
if (fieldType().analyzingSuggestLookupProvider.getPreserveSep() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreserveSep()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_separators' values");
}
if(!ContextMapping.mappingsAreEqual(getContextMapping(), fieldMergeWith.getContextMapping())) {
if(!ContextMapping.mappingsAreEqual(fieldType().getContextMapping(), fieldMergeWith.fieldType().getContextMapping())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'context_mapping' values");
}
if (!mergeResult.simulate()) {

View File

@ -47,6 +47,7 @@ public class Murmur3FieldMapper extends LongFieldMapper {
public static final String CONTENT_TYPE = "murmur3";
public static class Defaults extends LongFieldMapper.Defaults {
public static final MappedFieldType FIELD_TYPE = new Murmur3FieldType();
}
public static class Builder extends NumberFieldMapper.Builder<Builder, Murmur3FieldMapper> {
@ -104,6 +105,20 @@ public class Murmur3FieldMapper extends LongFieldMapper {
}
}
// this only exists so a check can be done to match the field type to using murmur3 hashing...
public static class Murmur3FieldType extends LongFieldMapper.LongFieldType {
public Murmur3FieldType() {}
protected Murmur3FieldType(Murmur3FieldType ref) {
super(ref);
}
@Override
public Murmur3FieldType clone() {
return new Murmur3FieldType(this);
}
}
protected Murmur3FieldMapper(MappedFieldType fieldType, Boolean docValues,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
@Nullable Settings fieldDataSettings,

View File

@ -184,7 +184,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
}
}
static final class StringFieldType extends MappedFieldType {
public static final class StringFieldType extends MappedFieldType {
public StringFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);

View File

@ -66,7 +66,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = FieldNamesFieldMapper.NAME;
public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_ENABLED;
public static final boolean ENABLED = true;
public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType();
static {
@ -82,7 +82,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
public static class Builder extends AbstractFieldMapper.Builder<Builder, FieldNamesFieldMapper> {
private EnabledAttributeMapper enabledState = Defaults.ENABLED_STATE;
private boolean enabled = Defaults.ENABLED;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
@ -97,14 +97,16 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
public Builder enabled(boolean enabled) {
this.enabledState = enabled ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
this.enabled = enabled;
return this;
}
@Override
public FieldNamesFieldMapper build(BuilderContext context) {
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new FieldNamesFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings());
setupFieldType(context);
FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldType)fieldType;
fieldNamesFieldType.setEnabled(enabled);
return new FieldNamesFieldMapper(fieldType, fieldDataSettings, context.indexSettings());
}
}
@ -133,7 +135,9 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
}
static final class FieldNamesFieldType extends MappedFieldType {
public static final class FieldNamesFieldType extends MappedFieldType {
private boolean enabled = Defaults.ENABLED;
public FieldNamesFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
@ -141,10 +145,20 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
protected FieldNamesFieldType(FieldNamesFieldType ref) {
super(ref);
this.enabled = ref.enabled;
}
public void setEnabled(boolean enabled) {
checkIfFrozen();
this.enabled = enabled;
}
public boolean isEnabled() {
return enabled;
}
@Override
public MappedFieldType clone() {
public FieldNamesFieldType clone() {
return new FieldNamesFieldType(this);
}
@ -163,22 +177,25 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
private final MappedFieldType defaultFieldType;
private EnabledAttributeMapper enabledState;
private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled
public FieldNamesFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings);
this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
}
public FieldNamesFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) {
public FieldNamesFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
this.defaultFieldType = Defaults.FIELD_TYPE;
this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
this.enabledState = enabledState;
}
@Override
public FieldNamesFieldType fieldType() {
return (FieldNamesFieldType)fieldType;
}
public boolean enabled() {
return pre13Index == false && enabledState.enabled;
return pre13Index == false && fieldType().isEnabled();
}
@Override
@ -240,7 +257,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (enabledState.enabled == false) {
if (fieldType().isEnabled() == false) {
return;
}
for (ParseContext.Document document : context.docs()) {
@ -270,13 +287,13 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
if (includeDefaults == false && fieldType().equals(Defaults.FIELD_TYPE) && enabledState == Defaults.ENABLED_STATE) {
if (includeDefaults == false && fieldType().equals(Defaults.FIELD_TYPE) && fieldType().isEnabled() == Defaults.ENABLED) {
return builder;
}
builder.startObject(NAME);
if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
builder.field("enabled", enabledState.enabled);
if (includeDefaults || fieldType().isEnabled() != Defaults.ENABLED) {
builder.field("enabled", fieldType().isEnabled());
}
if (indexCreatedBefore2x && (includeDefaults || fieldType().equals(Defaults.FIELD_TYPE) == false)) {
super.doXContentBody(builder, includeDefaults, params);
@ -290,8 +307,10 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
FieldNamesFieldMapper fieldNamesMapperMergeWith = (FieldNamesFieldMapper)mergeWith;
if (!mergeResult.simulate()) {
if (fieldNamesMapperMergeWith.enabledState != enabledState && !fieldNamesMapperMergeWith.enabledState.unset()) {
this.enabledState = fieldNamesMapperMergeWith.enabledState;
if (fieldNamesMapperMergeWith.fieldType().isEnabled() != fieldType().isEnabled()) {
this.fieldType = fieldType().clone();
fieldType().setEnabled(fieldNamesMapperMergeWith.fieldType().isEnabled());
fieldType().freeze();
}
}
}

View File

@ -157,7 +157,7 @@ public class IpFieldMapper extends NumberFieldMapper {
}
}
static final class IpFieldType extends NumberFieldType {
public static final class IpFieldType extends NumberFieldType {
public IpFieldType() {}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fieldvisitor.JustSourceFieldsVisitor;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@ -54,7 +55,7 @@ final class QueriesLoaderCollector extends SimpleCollector {
QueriesLoaderCollector(PercolatorQueriesRegistry percolator, ESLogger logger, MapperService mapperService, IndexFieldDataService indexFieldDataService) {
this.percolator = percolator;
this.logger = logger;
final FieldMapper uidMapper = mapperService.smartNameFieldMapper(UidFieldMapper.NAME);
final MappedFieldType uidMapper = mapperService.smartNameFieldType(UidFieldMapper.NAME);
this.uidFieldData = indexFieldDataService.getForField(uidMapper);
}

View File

@ -30,7 +30,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
@ -163,20 +163,20 @@ public class CommonTermsQueryParser implements QueryParser {
throw new QueryParsingException(parseContext, "No text specified for text query");
}
String field;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
field = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
field = fieldName;
}
Analyzer analyzer = null;
if (queryAnalyzer == null) {
if (mapper != null) {
analyzer = mapper.fieldType().searchAnalyzer();
if (fieldType != null) {
analyzer = fieldType.searchAnalyzer();
}
if (analyzer == null && mapper != null) {
analyzer = parseContext.getSearchAnalyzer(mapper);
if (analyzer == null && fieldType != null) {
analyzer = parseContext.getSearchAnalyzer(fieldType);
}
if (analyzer == null) {
analyzer = parseContext.mapperService().searchAnalyzer();
@ -188,7 +188,7 @@ public class CommonTermsQueryParser implements QueryParser {
}
}
ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoords, mapper);
ExtendedCommonTermsQuery commonsQuery = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoords, fieldType);
commonsQuery.setBoost(boost);
Query query = parseQueryString(commonsQuery, value.toString(), field, parseContext, analyzer, lowFreqMinimumShouldMatch, highFreqMinimumShouldMatch);
if (queryName != null) {

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
@ -77,7 +78,7 @@ public class ExistsQueryParser implements QueryParser {
}
public static Query newFilter(QueryParseContext parseContext, String fieldPattern, String queryName) {
final FieldNamesFieldMapper fieldNamesMapper = (FieldNamesFieldMapper)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern);
if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) {
@ -93,20 +94,20 @@ public class ExistsQueryParser implements QueryParser {
BooleanQuery boolFilter = new BooleanQuery();
for (String field : fields) {
FieldMapper mapper = parseContext.fieldMapper(field);
MappedFieldType fieldType = parseContext.fieldMapper(field);
Query filter = null;
if (fieldNamesMapper!= null && fieldNamesMapper.enabled()) {
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (mapper != null) {
f = mapper.fieldType().names().indexName();
if (fieldType != null) {
f = fieldType.names().indexName();
} else {
f = field;
}
filter = fieldNamesMapper.termQuery(f, parseContext);
filter = fieldNamesFieldType.termQuery(f, parseContext);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && mapper != null) {
filter = mapper.rangeQuery(null, null, true, true, parseContext);
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true, parseContext);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
@ -90,9 +91,9 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
throw new QueryParsingException(parseContext, "field_masking_span must have [field] set for it");
}
FieldMapper mapper = parseContext.fieldMapper(field);
if (mapper != null) {
field = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(field);
if (fieldType != null) {
field = fieldType.names().indexName();
}
FieldMaskingSpanQuery query = new FieldMaskingSpanQuery(inner, field);

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
@ -114,9 +115,9 @@ public class FuzzyQueryParser implements QueryParser {
}
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
query = mapper.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
}
if (query == null) {
query = new FuzzyQuery(new Term(fieldName, value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions);

View File

@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
@ -160,20 +161,20 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
}
}
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(mapper instanceof GeoPointFieldMapper)) {
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
Query filter;
if ("indexed".equals(type)) {
filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoMapper);
filter = IndexedGeoBoundingBoxQuery.create(topLeft, bottomRight, geoFieldType);
} else if ("memory".equals(type)) {
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData);
} else {
throw new QueryParsingException(parseContext, "geo bounding box type [" + type

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@ -147,18 +148,18 @@ public class GeoDistanceQueryParser implements QueryParser {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(mapper instanceof GeoPointFieldMapper)) {
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoMapper, indexFieldData, optimizeBbox);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(point, null, distance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@ -187,17 +188,17 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(mapper instanceof GeoPointFieldMapper)) {
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoMapper, indexFieldData, optimizeBbox);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(point, from, to, includeLower, includeUpper, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
@ -136,15 +137,15 @@ public class GeoPolygonQueryParser implements QueryParser {
}
}
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(mapper instanceof GeoPointFieldMapper)) {
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
Query query = new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);

View File

@ -32,6 +32,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.search.shape.ShapeFetchService;
import org.elasticsearch.search.internal.SearchContext;
@ -138,21 +139,21 @@ public class GeoShapeQueryParser implements QueryParser {
throw new QueryParsingException(parseContext, "No Shape Relation defined");
}
FieldMapper fieldMapper = parseContext.fieldMapper(fieldName);
if (fieldMapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "Failed to find geo_shape field [" + fieldName + "]");
}
// TODO: This isn't the nicest way to check this
if (!(fieldMapper instanceof GeoShapeFieldMapper)) {
if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) {
throw new QueryParsingException(parseContext, "Field [" + fieldName + "] is not a geo_shape");
}
GeoShapeFieldMapper shapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
PrefixTreeStrategy strategy = shapeFieldMapper.fieldType().defaultStrategy();
PrefixTreeStrategy strategy = shapeFieldType.defaultStrategy();
if (strategyName != null) {
strategy = shapeFieldMapper.fieldType().resolveStrategy(strategyName);
strategy = shapeFieldType.resolveStrategy(strategyName);
}
Query query;
if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) {

View File

@ -66,13 +66,13 @@ public class GeohashCellQuery {
* returns a boolean filter combining the geohashes OR-wise.
*
* @param context Context of the filter
* @param fieldMapper field mapper for geopoints
* @param fieldType field mapper for geopoints
* @param geohash mandatory geohash
* @param geohashes optional array of additional geohashes
* @return a new GeoBoundinboxfilter
*/
public static Query create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldMapper.fieldType().geohashFieldType();
public static Query create(QueryParseContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geohashFieldType();
if (geoHashMapper == null) {
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
}
@ -237,17 +237,17 @@ public class GeohashCellQuery {
throw new QueryParsingException(parseContext, "no geohash value provided to geohash_cell filter");
}
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(mapper instanceof GeoPointFieldMapper)) {
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper);
if (!geoMapper.fieldType().isGeohashPrefixEnabled()) {
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
if (!geoFieldType.isGeohashPrefixEnabled()) {
throw new QueryParsingException(parseContext, "can't execute geohash_cell on field [" + fieldName
+ "], geohash_prefix is not enabled");
}
@ -259,9 +259,9 @@ public class GeohashCellQuery {
Query filter;
if (neighbors) {
filter = create(parseContext, geoMapper, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
filter = create(parseContext, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
} else {
filter = create(parseContext, geoMapper, geohash, null);
filter = create(parseContext, geoFieldType, geohash, null);
}
return filter;

View File

@ -176,7 +176,7 @@ public class HasChildQueryParser implements QueryParser {
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());
final Query query;
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
} else {

View File

@ -166,7 +166,7 @@ public class HasParentQueryParser implements QueryParser {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
parentTypes.add(parentFieldMapper.type());

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
@ -89,7 +90,7 @@ public class MissingQueryParser implements QueryParser {
throw new QueryParsingException(parseContext, "missing must have either existence, or null_value, or both set to true");
}
final FieldNamesFieldMapper fieldNamesMapper = (FieldNamesFieldMapper)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
MapperService.SmartNameObjectMapper smartNameObjectMapper = parseContext.smartObjectMapper(fieldPattern);
if (smartNameObjectMapper != null && smartNameObjectMapper.hasMapper()) {
// automatic make the object mapper pattern
@ -111,20 +112,20 @@ public class MissingQueryParser implements QueryParser {
if (existence) {
BooleanQuery boolFilter = new BooleanQuery();
for (String field : fields) {
FieldMapper mapper = parseContext.fieldMapper(field);
MappedFieldType fieldType = parseContext.fieldMapper(field);
Query filter = null;
if (fieldNamesMapper != null && fieldNamesMapper.enabled()) {
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (mapper != null) {
f = mapper.fieldType().names().indexName();
if (fieldType != null) {
f = fieldType.names().indexName();
} else {
f = field;
}
filter = fieldNamesMapper.termQuery(f, parseContext);
filter = fieldNamesFieldType.termQuery(f, parseContext);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && mapper != null) {
filter = mapper.rangeQuery(null, null, true, true, parseContext);
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true, parseContext);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);
@ -138,9 +139,9 @@ public class MissingQueryParser implements QueryParser {
if (nullValue) {
for (String field : fields) {
FieldMapper mapper = parseContext.fieldMapper(field);
if (mapper != null) {
nullFilter = mapper.nullValueFilter();
MappedFieldType fieldType = parseContext.fieldMapper(field);
if (fieldType != null) {
nullFilter = fieldType.nullValueQuery();
}
}
}

View File

@ -37,7 +37,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.Analysis;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService;
import org.elasticsearch.search.internal.SearchContext;
@ -166,8 +166,8 @@ public class MoreLikeThisQueryParser implements QueryParser {
moreLikeFields = Lists.newLinkedList();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String field = parser.text();
FieldMapper mapper = parseContext.fieldMapper(field);
moreLikeFields.add(mapper == null ? field : mapper.fieldType().names().indexName());
MappedFieldType fieldType = parseContext.fieldMapper(field);
moreLikeFields.add(fieldType == null ? field : fieldType.names().indexName());
}
} else if (Fields.DOCUMENT_IDS.match(currentFieldName, parseContext.parseFlags())) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {

View File

@ -26,7 +26,7 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
@ -100,9 +100,9 @@ public class PrefixQueryParser implements QueryParser {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewriteMethod, null);
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
query = mapper.prefixQuery(value, method, parseContext);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.prefixQuery(value, method, parseContext);
}
if (query == null) {
PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, BytesRefs.toBytesRef(value)));

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
@ -179,7 +178,7 @@ public class QueryParseContext {
return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter);
}
public <IFD extends IndexFieldData<?>> IFD getForField(FieldMapper mapper) {
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
return indexQueryParser.fieldDataService.getForField(mapper);
}
@ -280,8 +279,8 @@ public class QueryParseContext {
return indexQueryParser.mapperService.simpleMatchToIndexNames(pattern, getTypes());
}
public FieldMapper fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldMapper(name, getTypes()));
public MappedFieldType fieldMapper(String name) {
return failIfFieldMappingNotFound(name, indexQueryParser.mapperService.smartNameFieldType(name, getTypes()));
}
public MapperService.SmartNameObjectMapper smartObjectMapper(String name) {
@ -291,9 +290,9 @@ public class QueryParseContext {
/** Gets the search analyzer for the given field, or the default if there is none present for the field
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchAnalyzer(FieldMapper mapper) {
if (mapper.fieldType().searchAnalyzer() != null) {
return mapper.fieldType().searchAnalyzer();
public Analyzer getSearchAnalyzer(MappedFieldType fieldType) {
if (fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer();
}
return mapperService().searchAnalyzer();
}
@ -301,9 +300,9 @@ public class QueryParseContext {
/** Gets the search quote nalyzer for the given field, or the default if there is none present for the field
* TODO: remove this by moving defaults into mappers themselves
*/
public Analyzer getSearchQuoteAnalyzer(FieldMapper mapper) {
if (mapper.fieldType().searchQuoteAnalyzer() != null) {
return mapper.fieldType().searchQuoteAnalyzer();
public Analyzer getSearchQuoteAnalyzer(MappedFieldType fieldType) {
if (fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer();
}
return mapperService().searchQuoteAnalyzer();
}
@ -316,15 +315,14 @@ public class QueryParseContext {
this.mapUnmappedFieldAsString = mapUnmappedFieldAsString;
}
private FieldMapper failIfFieldMappingNotFound(String name, FieldMapper fieldMapping) {
private MappedFieldType failIfFieldMappingNotFound(String name, MappedFieldType fieldMapping) {
if (allowUnmappedFields) {
return fieldMapping;
} else if (mapUnmappedFieldAsString){
StringFieldMapper.Builder builder = MapperBuilders.stringField(name);
// it would be better to pass the real index settings, but they are not easily accessible from here...
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexQueryParser.getIndexCreatedVersion()).build();
StringFieldMapper stringFieldMapper = builder.build(new Mapper.BuilderContext(settings, new ContentPath(1)));
return stringFieldMapper;
return builder.build(new Mapper.BuilderContext(settings, new ContentPath(1))).fieldType();
} else {
Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.joda.time.DateTimeZone;
@ -120,10 +121,10 @@ public class RangeQueryParser implements QueryParser {
}
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
MappedFieldType mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
if (mapper instanceof DateFieldMapper) {
query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext);
if (mapper instanceof DateFieldMapper.DateFieldType) {
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext);
} else {
if (timeZone != null) {
throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field ["

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
@ -108,9 +109,9 @@ public class RegexpQueryParser implements QueryParser {
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(rewriteMethod, null);
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
query = mapper.regexpQuery(value, flagsValue, maxDeterminizedStates, method, parseContext);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.regexpQuery(value, flagsValue, maxDeterminizedStates, method, parseContext);
}
if (query == null) {
RegexpQuery regexpQuery = new RegexpQuery(new Term(fieldName, BytesRefs.toBytesRef(value)), flagsValue, maxDeterminizedStates);

View File

@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Collections;
@ -130,9 +131,9 @@ public class SimpleQueryStringParser implements QueryParser {
fieldsAndWeights.put(fieldName, fBoost);
}
} else {
FieldMapper mapper = parseContext.fieldMapper(fField);
if (mapper != null) {
fieldsAndWeights.put(mapper.fieldType().names().indexName(), fBoost);
MappedFieldType fieldType = parseContext.fieldMapper(fField);
if (fieldType != null) {
fieldsAndWeights.put(fieldType.names().indexName(), fBoost);
} else {
fieldsAndWeights.put(fField, fBoost);
}

View File

@ -26,7 +26,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
@ -93,10 +93,10 @@ public class SpanTermQueryParser implements QueryParser {
}
BytesRef valueBytes = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.fieldType().names().indexName();
valueBytes = mapper.indexedValueForSearch(value);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
fieldName = fieldType.names().indexName();
valueBytes = fieldType.indexedValueForSearch(value);
}
if (valueBytes == null) {
valueBytes = new BytesRef(value);

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
@ -99,9 +100,9 @@ public class TermQueryParser implements QueryParser {
}
Query query = null;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
query = mapper.termQuery(value, parseContext);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
query = fieldType.termQuery(value, parseContext);
}
if (query == null) {
query = new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(value)));

View File

@ -37,6 +37,7 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.indices.cache.filter.terms.TermsLookup;
import org.elasticsearch.search.internal.SearchContext;
@ -158,9 +159,9 @@ public class TermsQueryParser implements QueryParser {
throw new QueryParsingException(parseContext, "terms query requires a field name, followed by array of terms");
}
FieldMapper fieldMapper = parseContext.fieldMapper(fieldName);
if (fieldMapper != null) {
fieldName = fieldMapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
fieldName = fieldType.names().indexName();
}
if (lookupId != null) {
@ -180,8 +181,8 @@ public class TermsQueryParser implements QueryParser {
Query query;
if (parseContext.isFilter()) {
if (fieldMapper != null) {
query = fieldMapper.termsQuery(terms, parseContext);
if (fieldType != null) {
query = fieldType.termsQuery(terms, parseContext);
} else {
BytesRef[] filterValues = new BytesRef[terms.size()];
for (int i = 0; i < filterValues.length; i++) {
@ -192,8 +193,8 @@ public class TermsQueryParser implements QueryParser {
} else {
BooleanQuery bq = new BooleanQuery();
for (Object term : terms) {
if (fieldMapper != null) {
bq.add(fieldMapper.termQuery(term, parseContext), Occur.SHOULD);
if (fieldType != null) {
bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD);
} else {
bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD);
}

View File

@ -26,6 +26,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
@ -93,10 +94,10 @@ public class WildcardQueryParser implements QueryParser {
}
BytesRef valueBytes;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.fieldType().names().indexName();
valueBytes = mapper.indexedValueForSearch(value);
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
fieldName = fieldType.names().indexName();
valueBytes = fieldType.indexedValueForSearch(value);
} else {
valueBytes = new BytesRef(value);
}

View File

@ -39,7 +39,7 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
@ -151,27 +151,27 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
// now, the field must exist, else we cannot read the value for
// the doc later
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "Unknown field [" + fieldName + "]");
}
// dates and time need special handling
parser.nextToken();
if (mapper instanceof DateFieldMapper) {
return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper) mapper, mode);
} else if (mapper instanceof GeoPointFieldMapper) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper) mapper, mode);
} else if (mapper instanceof NumberFieldMapper) {
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper) mapper, mode);
if (fieldType instanceof DateFieldMapper.DateFieldType) {
return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper.DateFieldType) fieldType, mode);
} else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) {
return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
} else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode);
} else {
throw new QueryParsingException(parseContext, "Field " + fieldName + " is of type " + mapper.fieldType()
throw new QueryParsingException(parseContext, "Field " + fieldName + " is of type " + fieldType
+ ", but only numeric types are supported.");
}
}
private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
NumberFieldMapper mapper, MultiValueMode mode) throws IOException {
NumberFieldMapper.NumberFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
double scale = 0;
@ -201,12 +201,12 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
throw new ElasticsearchParseException("Both " + DecayFunctionBuilder.SCALE + " and " + DecayFunctionBuilder.ORIGIN
+ " must be set for numeric fields.");
}
IndexNumericFieldData numericFieldData = parseContext.getForField(mapper);
IndexNumericFieldData numericFieldData = parseContext.getForField(fieldType);
return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}
private AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
GeoPointFieldMapper mapper, MultiValueMode mode) throws IOException {
GeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
GeoPoint origin = new GeoPoint();
@ -233,13 +233,13 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}
double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(mapper);
IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);
}
private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
DateFieldMapper dateFieldMapper, MultiValueMode mode) throws IOException {
DateFieldMapper.DateFieldType dateFieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
String scaleString = null;
@ -263,7 +263,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
}
long origin = SearchContext.current().nowInMillis();
if (originString != null) {
origin = dateFieldMapper.fieldType().parseToMilliseconds(originString, false, null, null);
origin = dateFieldType.parseToMilliseconds(originString, false, null, null);
}
if (scaleString == null) {
@ -273,7 +273,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
double scale = val.getMillis();
val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24), getClass().getSimpleName() + ".offset");
double offset = val.getMillis();
IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldMapper);
IndexNumericFieldData numericFieldData = parseContext.getForField(dateFieldType);
return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
@ -84,12 +85,12 @@ public class FieldValueFactorFunctionParser implements ScoreFunctionParser {
}
SearchContext searchContext = SearchContext.current();
FieldMapper mapper = searchContext.mapperService().smartNameFieldMapper(field);
if (mapper == null) {
MappedFieldType fieldType = searchContext.mapperService().smartNameFieldType(field);
if (fieldType == null) {
throw new ElasticsearchException("Unable to find a field mapper for field [" + field + "]");
}
return new FieldValueFactorFunction(field, boostFactor, modifier, missing,
(IndexNumericFieldData)searchContext.fieldData().getForField(mapper));
(IndexNumericFieldData)searchContext.fieldData().getForField(fieldType));
}
@Override

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
@ -82,8 +83,8 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
}
}
final FieldMapper mapper = SearchContext.current().mapperService().smartNameFieldMapper("_uid");
if (mapper == null) {
final MappedFieldType fieldType = SearchContext.current().mapperService().smartNameFieldType("_uid");
if (fieldType == null) {
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
return new RandomScoreFunction();
}
@ -93,7 +94,7 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
}
final ShardId shardId = SearchContext.current().indexShard().shardId();
final int salt = (shardId.index().name().hashCode() << 10) | shardId.id();
final IndexFieldData<?> uidFieldData = SearchContext.current().fieldData().getForField(mapper);
final IndexFieldData<?> uidFieldData = SearchContext.current().fieldData().getForField(fieldType);
return new RandomScoreFunction(seed, salt, uidFieldData);
}

View File

@ -30,6 +30,7 @@ import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.QueryParsers;
@ -138,10 +139,10 @@ public class MatchQuery {
return false;
}
protected Analyzer getAnalyzer(FieldMapper mapper) {
protected Analyzer getAnalyzer(MappedFieldType fieldType) {
if (this.analyzer == null) {
if (mapper != null) {
return parseContext.getSearchAnalyzer(mapper);
if (fieldType != null) {
return parseContext.getSearchAnalyzer(fieldType);
}
return parseContext.mapperService().searchAnalyzer();
} else {
@ -155,16 +156,16 @@ public class MatchQuery {
public Query parse(Type type, String fieldName, Object value) throws IOException {
final String field;
FieldMapper mapper = parseContext.fieldMapper(fieldName);
if (mapper != null) {
field = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
} else {
field = fieldName;
}
if (mapper != null && mapper.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) {
if (fieldType != null && fieldType.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) {
try {
return mapper.termQuery(value, parseContext);
return fieldType.termQuery(value, parseContext);
} catch (RuntimeException e) {
if (lenient) {
return null;
@ -173,9 +174,9 @@ public class MatchQuery {
}
}
Analyzer analyzer = getAnalyzer(mapper);
Analyzer analyzer = getAnalyzer(fieldType);
assert analyzer != null;
MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, mapper);
MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType);
builder.setEnablePositionIncrements(this.enablePositionIncrements);
Query query = null;
@ -184,7 +185,7 @@ public class MatchQuery {
if (commonTermsCutoff == null) {
query = builder.createBooleanQuery(field, value.toString(), occur);
} else {
query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff, mapper);
query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff, fieldType);
}
break;
case PHRASE:
@ -210,11 +211,11 @@ public class MatchQuery {
private class MatchQueryBuilder extends QueryBuilder {
private final FieldMapper mapper;
private final MappedFieldType mapper;
/**
* Creates a new QueryBuilder using the given analyzer.
*/
public MatchQueryBuilder(Analyzer analyzer, @Nullable FieldMapper mapper) {
public MatchQueryBuilder(Analyzer analyzer, @Nullable MappedFieldType mapper) {
super(analyzer);
this.mapper = mapper;
}
@ -253,11 +254,11 @@ public class MatchQuery {
return query;
}
public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, FieldMapper mapper) {
public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) {
Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur);
if (booleanQuery != null && booleanQuery instanceof BooleanQuery) {
BooleanQuery bq = (BooleanQuery) booleanQuery;
ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ((BooleanQuery)booleanQuery).isCoordDisabled(), mapper);
ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ((BooleanQuery)booleanQuery).isCoordDisabled(), fieldType);
for (BooleanClause clause : bq.clauses()) {
if (!(clause.getQuery() instanceof TermQuery)) {
return booleanQuery;
@ -271,10 +272,10 @@ public class MatchQuery {
}
}
protected Query blendTermQuery(Term term, FieldMapper mapper) {
protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
if (fuzziness != null) {
if (mapper != null) {
Query query = mapper.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
if (fieldType != null) {
Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions);
if (query instanceof FuzzyQuery) {
QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod);
}
@ -284,8 +285,8 @@ public class MatchQuery {
QueryParsers.setRewriteMethod(query, rewriteMethod);
return query;
}
if (mapper != null) {
Query termQuery = mapper.queryStringTermQuery(term);
if (fieldType != null) {
Query termQuery = fieldType.queryStringTermQuery(term);
if (termQuery != null) {
return termQuery;
}

View File

@ -30,6 +30,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
@ -140,8 +141,8 @@ public class MultiMatchQuery extends MatchQuery {
}
}
public Query blendTerm(Term term, FieldMapper mapper) {
return MultiMatchQuery.super.blendTermQuery(term, mapper);
public Query blendTerm(Term term, MappedFieldType fieldType) {
return MultiMatchQuery.super.blendTermQuery(term, fieldType);
}
public boolean forceAnalyzeQueryString() {
@ -150,7 +151,7 @@ public class MultiMatchQuery extends MatchQuery {
}
public class CrossFieldsQueryBuilder extends QueryBuilder {
private FieldAndMapper[] blendedFields;
private FieldAndFieldType[] blendedFields;
public CrossFieldsQueryBuilder(float tieBreaker) {
super(false, tieBreaker);
@ -158,20 +159,20 @@ public class MultiMatchQuery extends MatchQuery {
@Override
public List<Query> buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
Map<Analyzer, List<FieldAndMapper>> groups = new HashMap<>();
Map<Analyzer, List<FieldAndFieldType>> groups = new HashMap<>();
List<Tuple<String, Float>> missing = new ArrayList<>();
for (Map.Entry<String, Float> entry : fieldNames.entrySet()) {
String name = entry.getKey();
FieldMapper mapper = parseContext.fieldMapper(name);
if (mapper != null) {
Analyzer actualAnalyzer = getAnalyzer(mapper);
name = mapper.fieldType().names().indexName();
MappedFieldType fieldType = parseContext.fieldMapper(name);
if (fieldType != null) {
Analyzer actualAnalyzer = getAnalyzer(fieldType);
name = fieldType.names().indexName();
if (!groups.containsKey(actualAnalyzer)) {
groups.put(actualAnalyzer, new ArrayList<FieldAndMapper>());
groups.put(actualAnalyzer, new ArrayList<FieldAndFieldType>());
}
Float boost = entry.getValue();
boost = boost == null ? Float.valueOf(1.0f) : boost;
groups.get(actualAnalyzer).add(new FieldAndMapper(name, mapper, boost));
groups.get(actualAnalyzer).add(new FieldAndFieldType(name, fieldType, boost));
} else {
missing.add(new Tuple(name, entry.getValue()));
}
@ -184,18 +185,18 @@ public class MultiMatchQuery extends MatchQuery {
queries.add(q);
}
}
for (List<FieldAndMapper> group : groups.values()) {
for (List<FieldAndFieldType> group : groups.values()) {
if (group.size() > 1) {
blendedFields = new FieldAndMapper[group.size()];
blendedFields = new FieldAndFieldType[group.size()];
int i = 0;
for (FieldAndMapper fieldAndMapper : group) {
blendedFields[i++] = fieldAndMapper;
for (FieldAndFieldType fieldAndFieldType : group) {
blendedFields[i++] = fieldAndFieldType;
}
} else {
blendedFields = null;
}
final FieldAndMapper fieldAndMapper= group.get(0);
Query q = parseGroup(type.matchQueryType(), fieldAndMapper.field, fieldAndMapper.boost, value, minimumShouldMatch);
final FieldAndFieldType fieldAndFieldType = group.get(0);
Query q = parseGroup(type.matchQueryType(), fieldAndFieldType.field, fieldAndFieldType.boost, value, minimumShouldMatch);
if (q != null) {
queries.add(q);
}
@ -210,9 +211,9 @@ public class MultiMatchQuery extends MatchQuery {
}
@Override
public Query blendTerm(Term term, FieldMapper mapper) {
public Query blendTerm(Term term, MappedFieldType fieldType) {
if (blendedFields == null) {
return super.blendTerm(term, mapper);
return super.blendTerm(term, fieldType);
}
final Term[] terms = new Term[blendedFields.length];
float[] blendedBoost = new float[blendedFields.length];
@ -232,28 +233,28 @@ public class MultiMatchQuery extends MatchQuery {
}
@Override
protected Query blendTermQuery(Term term, FieldMapper mapper) {
protected Query blendTermQuery(Term term, MappedFieldType fieldType) {
if (queryBuilder == null) {
return super.blendTermQuery(term, mapper);
return super.blendTermQuery(term, fieldType);
}
return queryBuilder.blendTerm(term, mapper);
return queryBuilder.blendTerm(term, fieldType);
}
private static final class FieldAndMapper {
private static final class FieldAndFieldType {
final String field;
final FieldMapper mapper;
final MappedFieldType fieldType;
final float boost;
private FieldAndMapper(String field, FieldMapper mapper, float boost) {
private FieldAndFieldType(String field, MappedFieldType fieldType, float boost) {
this.field = field;
this.mapper = mapper;
this.fieldType = fieldType;
this.boost = boost;
}
public Term newTerm(String value) {
try {
final BytesRef bytesRef = mapper.indexedValueForSearch(value);
final BytesRef bytesRef = fieldType.indexedValueForSearch(value);
return new Term(field, bytesRef);
} catch (Exception ex) {
// we can't parse it just use the incoming value -- it will

View File

@ -58,7 +58,7 @@ public class GeoDistanceRangeQuery extends Query {
private final IndexGeoPointFieldData indexFieldData;
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapper mapper, IndexGeoPointFieldData indexFieldData,
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapper.GeoPointFieldType fieldType, IndexGeoPointFieldData indexFieldData,
String optimizeBbox) {
this.lat = point.lat();
this.lon = point.lon();
@ -91,7 +91,7 @@ public class GeoDistanceRangeQuery extends Query {
if ("memory".equals(optimizeBbox)) {
boundingBoxFilter = null;
} else if ("indexed".equals(optimizeBbox)) {
boundingBoxFilter = IndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(), distanceBoundingCheck.bottomRight(), mapper);
boundingBoxFilter = IndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(), distanceBoundingCheck.bottomRight(), fieldType);
distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
} else {
throw new IllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported");

View File

@ -30,31 +30,31 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
*/
public class IndexedGeoBoundingBoxQuery {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
if (!fieldMapper.fieldType().isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldMapper.name() + "], can't use indexed filter on it");
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
if (!fieldType.isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.names().fullName() + "], can't use indexed filter on it");
}
//checks to see if bounding box crosses 180 degrees
if (topLeft.lon() > bottomRight.lon()) {
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldMapper);
return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
} else {
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldMapper);
return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType);
}
}
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery filter = new BooleanQuery();
filter.setMinimumNumberShouldMatch(1);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD);
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD);
filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD);
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
return new ConstantScoreQuery(filter);
}
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) {
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery filter = new BooleanQuery();
filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST);
filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST);
return new ConstantScoreQuery(filter);
}
}

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
@ -99,8 +100,8 @@ public class SimilarityService extends AbstractIndexComponent {
@Override
public Similarity get(String name) {
FieldMapper mapper = mapperService.smartNameFieldMapper(name);
return (mapper != null && mapper.fieldType().similarity() != null) ? mapper.fieldType().similarity().get() : defaultSimilarity;
MappedFieldType fieldType = mapperService.smartNameFieldType(name);
return (fieldType != null && fieldType.similarity() != null) ? fieldType.similarity().get() : defaultSimilarity;
}
}
}

View File

@ -47,6 +47,7 @@ import org.elasticsearch.index.get.GetField;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParseContext;
@ -185,13 +186,13 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY));
}
private boolean isValidField(FieldMapper field) {
private boolean isValidField(MappedFieldType fieldType) {
// must be a string
if (!(field instanceof StringFieldMapper)) {
if (!(fieldType instanceof StringFieldMapper.StringFieldType)) {
return false;
}
// and must be indexed
if (field.fieldType().indexOptions() == IndexOptions.NONE) {
if (fieldType.indexOptions() == IndexOptions.NONE) {
return false;
}
return true;
@ -201,12 +202,12 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
/* only keep valid fields */
Set<String> validFields = new HashSet<>();
for (String field : selectedFields) {
FieldMapper fieldMapper = indexShard.mapperService().smartNameFieldMapper(field);
if (!isValidField(fieldMapper)) {
MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field);
if (!isValidField(fieldType)) {
continue;
}
// already retrieved, only if the analyzer hasn't been overridden at the field
if (fieldMapper.fieldType().storeTermVectors() &&
if (fieldType.storeTermVectors() &&
(request.perFieldAnalyzer() == null || !request.perFieldAnalyzer().containsKey(field))) {
continue;
}
@ -236,7 +237,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) {
analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString());
} else {
analyzer = mapperService.smartNameFieldMapper(field).fieldType().indexAnalyzer();
analyzer = mapperService.smartNameFieldType(field).indexAnalyzer();
}
if (analyzer == null) {
analyzer = mapperService.analysisService().defaultIndexAnalyzer();
@ -278,17 +279,17 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
Collection<String> seenFields = new HashSet<>();
Collection<GetField> getFields = new HashSet<>();
for (IndexableField field : doc.getFields()) {
FieldMapper fieldMapper = indexShard.mapperService().smartNameFieldMapper(field.name());
MappedFieldType fieldType = indexShard.mapperService().smartNameFieldType(field.name());
if (seenFields.contains(field.name())) {
continue;
}
else {
seenFields.add(field.name());
}
if (!isValidField(fieldMapper)) {
if (!isValidField(fieldType)) {
continue;
}
if (request.selectedFields() == null && !doAllFields && !fieldMapper.fieldType().storeTermVectors()) {
if (request.selectedFields() == null && !doAllFields && !fieldType.storeTermVectors()) {
continue;
}
if (request.selectedFields() != null && !request.selectedFields().contains(field.name())) {

View File

@ -196,7 +196,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent<IndicesTTLServ
private void purgeShards(List<IndexShard> shardsToPurge) {
for (IndexShard shardToPurge : shardsToPurge) {
Query query = shardToPurge.indexService().mapperService().smartNameFieldMapper(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true, null);
Query query = shardToPurge.indexService().mapperService().smartNameFieldType(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, true, null);
Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl");
try {
logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id());

View File

@ -45,6 +45,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.query.IndexQueryParserService;
@ -651,13 +652,13 @@ public class PercolateContext extends SearchContext {
}
@Override
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, types);
public MappedFieldType smartNameFieldType(String name) {
return mapperService().smartNameFieldType(name, types);
}
@Override
public FieldMapper smartNameFieldMapperFromAnyType(String name) {
return mapperService().smartNameFieldMapper(name);
public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
return mapperService().smartNameFieldType(name);
}
@Override

View File

@ -66,6 +66,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParsedDocument;
@ -752,7 +753,7 @@ public class PercolatorService extends AbstractComponent {
hls = new ArrayList<>(topDocs.scoreDocs.length);
}
final FieldMapper uidMapper = context.mapperService().smartNameFieldMapper(UidFieldMapper.NAME);
final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME);
final IndexFieldData<?> uidFieldData = context.fieldData().getForField(uidMapper);
int i = 0;
for (ScoreDoc scoreDoc : topDocs.scoreDocs) {

View File

@ -35,6 +35,7 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.ParsedQuery;
@ -73,7 +74,7 @@ abstract class QueryCollector extends SimpleCollector {
this.logger = logger;
this.queries = context.percolateQueries();
this.searcher = context.docSearcher();
final FieldMapper uidMapper = context.mapperService().smartNameFieldMapper(UidFieldMapper.NAME);
final MappedFieldType uidMapper = context.mapperService().smartNameFieldType(UidFieldMapper.NAME);
this.uidFieldData = context.fieldData().getForField(uidMapper);
this.isNestedDoc = isNestedDoc;

View File

@ -32,6 +32,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@ -152,21 +153,21 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
throw new ExpressionScriptCompilationException("Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()");
}
FieldMapper field = mapper.smartNameFieldMapper(fieldname);
MappedFieldType fieldType = mapper.smartNameFieldType(fieldname);
if (field == null) {
if (fieldType == null) {
throw new ExpressionScriptCompilationException("Field [" + fieldname + "] used in expression does not exist in mappings");
}
if (field.isNumeric() == false) {
if (fieldType.isNumeric() == false) {
// TODO: more context (which expression?)
throw new ExpressionScriptCompilationException("Field [" + fieldname + "] used in expression must be numeric");
}
IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField((NumberFieldMapper)field);
IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField((NumberFieldMapper.NumberFieldType)fieldType);
if (methodname == null) {
bindings.add(variable, new FieldDataValueSource(fieldData, MultiValueMode.MIN));
} else {
bindings.add(variable, getMethodValueSource(field, fieldData, fieldname, methodname));
bindings.add(variable, getMethodValueSource(fieldType, fieldData, fieldname, methodname));
}
}
}
@ -174,20 +175,20 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
return new ExpressionScript((Expression)compiledScript, bindings, specialValue);
}
protected ValueSource getMethodValueSource(FieldMapper field, IndexFieldData<?> fieldData, String fieldName, String methodName) {
protected ValueSource getMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName) {
switch (methodName) {
case GET_YEAR_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.YEAR);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.YEAR);
case GET_MONTH_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.MONTH);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MONTH);
case GET_DAY_OF_MONTH_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.DAY_OF_MONTH);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.DAY_OF_MONTH);
case GET_HOUR_OF_DAY_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.HOUR_OF_DAY);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.HOUR_OF_DAY);
case GET_MINUTES_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.MINUTE);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.MINUTE);
case GET_SECONDS_METHOD:
return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.SECOND);
return getDateMethodValueSource(fieldType, fieldData, fieldName, methodName, Calendar.SECOND);
case MINIMUM_METHOD:
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
case MAXIMUM_METHOD:
@ -205,8 +206,8 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
}
}
protected ValueSource getDateMethodValueSource(FieldMapper field, IndexFieldData<?> fieldData, String fieldName, String methodName, int calendarType) {
if (!(field instanceof DateFieldMapper)) {
protected ValueSource getDateMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName, int calendarType) {
if (!(fieldType instanceof DateFieldMapper.DateFieldType)) {
throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "].");
}

View File

@ -59,6 +59,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.TemplateQueryParser;
@ -921,7 +922,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
@Override
public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, FieldMapper> warmUp = new HashMap<>();
final Map<String, MappedFieldType> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
@ -936,26 +937,26 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
if (warmUp.containsKey(indexName)) {
continue;
}
warmUp.put(indexName, fieldMapper);
warmUp.put(indexName, fieldMapper.fieldType());
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
final Executor executor = threadPool.executor(executor());
final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
for (final FieldMapper fieldMapper : warmUp.values()) {
for (final MappedFieldType fieldType : warmUp.values()) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
final long start = System.nanoTime();
indexFieldDataService.getForField(fieldMapper).load(ctx);
indexFieldDataService.getForField(fieldType).load(ctx);
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.fieldType().names().fullName());
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.names().fullName());
} finally {
latch.countDown();
}
@ -975,7 +976,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
@Override
public TerminationHandle warmTopReader(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, FieldMapper> warmUpGlobalOrdinals = new HashMap<>();
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
@ -989,25 +990,25 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
if (warmUpGlobalOrdinals.containsKey(indexName)) {
continue;
}
warmUpGlobalOrdinals.put(indexName, fieldMapper);
warmUpGlobalOrdinals.put(indexName, fieldMapper.fieldType());
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
final Executor executor = threadPool.executor(executor());
final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
for (final FieldMapper fieldMapper : warmUpGlobalOrdinals.values()) {
for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
final long start = System.nanoTime();
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper);
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
ifd.loadGlobal(context.reader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.fieldType().names().fullName());
indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldType.names().fullName());
} finally {
latch.countDown();
}

View File

@ -88,8 +88,8 @@ public class ChildrenParser implements Aggregator.Parser {
// TODO: use the query API
parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
childFilter = new QueryWrapperFilter(childDocMapper.typeFilter());
ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper);
config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper));
ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper.fieldType());
config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper.fieldType()));
} else {
config.unmapped(true);
}

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.index.FilterableTermsEnum;
import org.elasticsearch.common.lucene.index.FreqTermsEnum;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
@ -131,7 +131,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
private final IncludeExclude includeExclude;
private final String executionHint;
private String indexedFieldName;
private FieldMapper mapper;
private MappedFieldType fieldType;
private FilterableTermsEnum termsEnum;
private int numberOfAggregatorsCreated = 0;
private final Query filter;
@ -152,7 +152,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
this.significanceHeuristic = significanceHeuristic;
if (!valueSourceConfig.unmapped()) {
this.indexedFieldName = config.fieldContext().field();
mapper = SearchContext.current().smartNameFieldMapper(indexedFieldName);
fieldType = SearchContext.current().smartNameFieldType(indexedFieldName);
}
this.filter = filter;
}
@ -266,7 +266,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
public long getBackgroundFrequency(long term) {
BytesRef indexedVal = mapper.indexedValueForSearch(term);
BytesRef indexedVal = fieldType.indexedValueForSearch(term);
return getBackgroundFrequency(indexedVal);
}

View File

@ -72,7 +72,7 @@ public class CardinalityParser implements Aggregator.Parser {
ValuesSourceConfig<?> config = vsParser.config();
if (rehash == null && config.fieldContext() != null && config.fieldContext().mapper() instanceof Murmur3FieldMapper) {
if (rehash == null && config.fieldContext() != null && config.fieldContext().fieldType() instanceof Murmur3FieldMapper.Murmur3FieldType) {
rehash = false;
} else if (rehash == null) {
rehash = true;

View File

@ -35,7 +35,6 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Date;
/**
*
@ -102,10 +101,10 @@ public class AggregationContext {
if (config.missing instanceof Number) {
missing = (Number) config.missing;
} else {
if (config.fieldContext != null && config.fieldContext.mapper() instanceof DateFieldMapper) {
final DateFieldMapper mapper = (DateFieldMapper) config.fieldContext.mapper();
if (config.fieldContext != null && config.fieldContext.fieldType() instanceof DateFieldMapper.DateFieldType) {
final DateFieldMapper.DateFieldType fieldType = (DateFieldMapper.DateFieldType) config.fieldContext.fieldType();
try {
missing = mapper.fieldType().dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
missing = fieldType.dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis();
} catch (IllegalArgumentException e) {
throw new SearchParseException(context, "Expected a date value in [missing] but got [" + config.missing + "]", null, e);
}

View File

@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.support;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
/**
 * Used by all field data based aggregators. This determines the context of the field data the aggregators are operating
@ -29,7 +30,7 @@ public class FieldContext {
private final String field;
private final IndexFieldData<?> indexFieldData;
private final FieldMapper mapper;
private final MappedFieldType fieldType;
/**
* Constructs a field data context for the given field and its index field data
@ -37,10 +38,10 @@ public class FieldContext {
* @param field The name of the field
* @param indexFieldData The index field data of the field
*/
public FieldContext(String field, IndexFieldData<?> indexFieldData, FieldMapper mapper) {
public FieldContext(String field, IndexFieldData<?> indexFieldData, MappedFieldType fieldType) {
this.field = field;
this.indexFieldData = indexFieldData;
this.mapper = mapper;
this.fieldType = fieldType;
}
public String field() {
@ -54,8 +55,8 @@ public class FieldContext {
return indexFieldData;
}
public FieldMapper mapper() {
return mapper;
public MappedFieldType fieldType() {
return fieldType;
}
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@ -169,8 +170,8 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return config;
}
FieldMapper mapper = context.smartNameFieldMapperFromAnyType(input.field);
if (mapper == null) {
MappedFieldType fieldType = context.smartNameFieldTypeFromAnyType(input.field);
if (fieldType == null) {
Class<VS> valuesSourceType = valueType != null ? (Class<VS>) valueType.getValuesSourceType() : this.valuesSourceType;
ValuesSourceConfig<VS> config = new ValuesSourceConfig<>(valuesSourceType);
config.missing = input.missing;
@ -183,7 +184,7 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return config;
}
IndexFieldData<?> indexFieldData = context.fieldData().getForField(mapper);
IndexFieldData<?> indexFieldData = context.fieldData().getForField(fieldType);
ValuesSourceConfig config;
if (valuesSourceType == ValuesSource.class) {
@ -198,10 +199,10 @@ public class ValuesSourceParser<VS extends ValuesSource> {
config = new ValuesSourceConfig(valuesSourceType);
}
config.fieldContext = new FieldContext(input.field, indexFieldData, mapper);
config.fieldContext = new FieldContext(input.field, indexFieldData, fieldType);
config.missing = input.missing;
config.script = createScript();
config.format = resolveFormat(input.format, mapper);
config.format = resolveFormat(input.format, fieldType);
return config;
}
@ -220,17 +221,17 @@ public class ValuesSourceParser<VS extends ValuesSource> {
return valueFormat;
}
private static ValueFormat resolveFormat(@Nullable String format, FieldMapper mapper) {
if (mapper instanceof DateFieldMapper) {
return format != null ? ValueFormat.DateTime.format(format) : ValueFormat.DateTime.mapper((DateFieldMapper) mapper);
private static ValueFormat resolveFormat(@Nullable String format, MappedFieldType fieldType) {
if (fieldType instanceof DateFieldMapper.DateFieldType) {
return format != null ? ValueFormat.DateTime.format(format) : ValueFormat.DateTime.mapper((DateFieldMapper.DateFieldType) fieldType);
}
if (mapper instanceof IpFieldMapper) {
if (fieldType instanceof IpFieldMapper.IpFieldType) {
return ValueFormat.IPv4;
}
if (mapper instanceof BooleanFieldMapper) {
if (fieldType instanceof BooleanFieldMapper.BooleanFieldType) {
return ValueFormat.BOOLEAN;
}
if (mapper instanceof NumberFieldMapper) {
if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return format != null ? ValueFormat.Number.format(format) : ValueFormat.RAW;
}
return null;

View File

@ -71,8 +71,8 @@ public class ValueFormat {
return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format));
}
public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.fieldType().dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper));
public static DateTime mapper(DateFieldMapper.DateFieldType fieldType) {
return new DateTime(fieldType.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(fieldType), ValueParser.DateMath.mapper(fieldType));
}
public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) {

View File

@ -104,8 +104,8 @@ public interface ValueFormatter extends Streamable {
public static final ValueFormatter DEFAULT = new ValueFormatter.DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER);
private DateTimeZone timeZone = DateTimeZone.UTC;
public static DateTime mapper(DateFieldMapper mapper) {
return new DateTime(mapper.fieldType().dateTimeFormatter());
public static DateTime mapper(DateFieldMapper.DateFieldType fieldType) {
return new DateTime(fieldType.dateTimeFormatter());
}
static final byte ID = 2;

View File

@ -108,8 +108,8 @@ public interface ValueParser {
return parseLong(value, searchContext);
}
public static DateMath mapper(DateFieldMapper mapper) {
return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter()));
public static DateMath mapper(DateFieldMapper.DateFieldType fieldType) {
return new DateMath(new DateMathParser(fieldType.dateTimeFormatter()));
}
}

View File

@ -45,7 +45,7 @@ import org.elasticsearch.index.fieldvisitor.JustUidFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.UidAndSourceFieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.search.SearchHit;
@ -142,17 +142,17 @@ public class FetchPhase implements SearchPhase {
}
continue;
}
FieldMapper mapper = context.smartNameFieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = context.smartNameFieldType(fieldName);
if (fieldType == null) {
// Only fail if we know it is an object field, missing paths / fields shouldn't fail.
if (context.smartNameObjectMapper(fieldName) != null) {
throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
}
} else if (mapper.fieldType().stored()) {
} else if (fieldType.stored()) {
if (fieldNames == null) {
fieldNames = new HashSet<>();
}
fieldNames.add(mapper.fieldType().names().indexName());
fieldNames.add(fieldType.names().indexName());
} else {
if (extractFieldNames == null) {
extractFieldNames = newArrayList();

View File

@ -19,11 +19,11 @@
package org.elasticsearch.search.fetch.fielddata;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchSubPhase;
@ -80,9 +80,9 @@ public class FieldDataFieldsFetchSubPhase implements FetchSubPhase {
hitField = new InternalSearchHitField(field.name(), new ArrayList<>(2));
hitContext.hit().fields().put(field.name(), hitField);
}
FieldMapper mapper = context.mapperService().smartNameFieldMapper(field.name());
if (mapper != null) {
AtomicFieldData data = context.fieldData().getForField(mapper).load(hitContext.readerContext());
MappedFieldType fieldType = context.mapperService().smartNameFieldType(field.name());
if (fieldType != null) {
AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext());
ScriptDocValues values = data.getScriptValues();
values.setNextDocId(hitContext.docId());
hitField.values().addAll(values.getValues());

View File

@ -50,7 +50,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
@ -713,13 +713,13 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
public FieldMapper smartNameFieldMapper(String name) {
return mapperService().smartNameFieldMapper(name, request.types());
public MappedFieldType smartNameFieldType(String name) {
return mapperService().smartNameFieldType(name, request.types());
}
@Override
public FieldMapper smartNameFieldMapperFromAnyType(String name) {
return mapperService().smartNameFieldMapper(name);
public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
return mapperService().smartNameFieldType(name);
}
@Override

View File

@ -35,7 +35,7 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
@ -532,13 +532,13 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
public FieldMapper smartNameFieldMapper(String name) {
return in.smartNameFieldMapper(name);
public MappedFieldType smartNameFieldType(String name) {
return in.smartNameFieldType(name);
}
@Override
public FieldMapper smartNameFieldMapperFromAnyType(String name) {
return in.smartNameFieldMapperFromAnyType(name);
public MappedFieldType smartNameFieldTypeFromAnyType(String name) {
return in.smartNameFieldTypeFromAnyType(name);
}
@Override

View File

@ -21,16 +21,13 @@ package org.elasticsearch.search.internal;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContext;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
@ -38,8 +35,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.ParsedQuery;
@ -343,12 +339,12 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders
public abstract ScanContext scanContext();
public abstract FieldMapper smartNameFieldMapper(String name);
public abstract MappedFieldType smartNameFieldType(String name);
/**
* Looks up the given field, but does not restrict to fields in the types set on this context.
*/
public abstract FieldMapper smartNameFieldMapperFromAnyType(String name);
public abstract MappedFieldType smartNameFieldTypeFromAnyType(String name);
public abstract MapperService.SmartNameObjectMapper smartNameObjectMapper(String name);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.lookup;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.util.ArrayList;
import java.util.List;
@ -29,8 +30,8 @@ import java.util.Map;
*/
public class FieldLookup {
// we can cache the mapper completely per name, since it's on an index/shard level (the lookup does not change within the scope of a search request)
private final FieldMapper mapper;
// we can cache the fieldType completely per name, since it's on an index/shard level (the lookup does not change within the scope of a search request)
private final MappedFieldType fieldType;
private Map<String, List<Object>> fields;
@ -42,12 +43,12 @@ public class FieldLookup {
private boolean valuesLoaded = false;
FieldLookup(FieldMapper mapper) {
this.mapper = mapper;
FieldLookup(MappedFieldType fieldType) {
this.fieldType = fieldType;
}
public FieldMapper mapper() {
return mapper;
public MappedFieldType fieldType() {
return fieldType;
}
public Map<String, List<Object>> fields() {
@ -85,7 +86,7 @@ public class FieldLookup {
}
valueLoaded = true;
value = null;
List<Object> values = fields.get(mapper.fieldType().names().indexName());
List<Object> values = fields.get(fieldType.names().indexName());
return values != null ? value = values.get(0) : null;
}
@ -95,6 +96,6 @@ public class FieldLookup {
}
valuesLoaded = true;
values.clear();
return values = fields().get(mapper.fieldType().names().indexName());
return values = fields().get(fieldType.names().indexName());
}
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.apache.lucene.index.LeafReaderContext;
@ -76,8 +77,8 @@ public class LeafDocLookup implements Map {
String fieldName = key.toString();
ScriptDocValues scriptValues = localCacheFieldData.get(fieldName);
if (scriptValues == null) {
final FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName, types);
if (mapper == null) {
final MappedFieldType fieldType = mapperService.smartNameFieldType(fieldName, types);
if (fieldType == null) {
throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping with types " + Arrays.toString(types) + "");
}
// load fielddata on behalf of the script: otherwise it would need additional permissions
@ -85,7 +86,7 @@ public class LeafDocLookup implements Map {
scriptValues = AccessController.doPrivileged(new PrivilegedAction<ScriptDocValues>() {
@Override
public ScriptDocValues run() {
return fieldDataService.getForField(mapper).load(reader).getScriptValues();
return fieldDataService.getForField(fieldType).load(reader).getScriptValues();
}
});
localCacheFieldData.put(fieldName, scriptValues);
@ -100,8 +101,8 @@ public class LeafDocLookup implements Map {
String fieldName = key.toString();
ScriptDocValues scriptValues = localCacheFieldData.get(fieldName);
if (scriptValues == null) {
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName, types);
if (mapper == null) {
MappedFieldType fieldType = mapperService.smartNameFieldType(fieldName, types);
if (fieldType == null) {
return false;
}
}

View File

@ -25,7 +25,7 @@ import org.apache.lucene.index.LeafReader;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import java.io.IOException;
@ -136,20 +136,20 @@ public class LeafFieldsLookup implements Map {
private FieldLookup loadFieldData(String name) {
FieldLookup data = cachedFieldData.get(name);
if (data == null) {
FieldMapper mapper = mapperService.smartNameFieldMapper(name, types);
if (mapper == null) {
MappedFieldType fieldType = mapperService.smartNameFieldType(name, types);
if (fieldType == null) {
throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types) + "");
}
data = new FieldLookup(mapper);
data = new FieldLookup(fieldType);
cachedFieldData.put(name, data);
}
if (data.fields() == null) {
String fieldName = data.mapper().fieldType().names().indexName();
String fieldName = data.fieldType().names().indexName();
fieldVisitor.reset(fieldName);
try {
reader.document(docId, fieldVisitor);
fieldVisitor.postProcess(data.mapper());
data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.mapper().fieldType().names().indexName())));
fieldVisitor.postProcess(data.fieldType());
data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.fieldType().names().indexName())));
} catch (IOException e) {
throw new ElasticsearchParseException("failed to load field [" + name + "]", e);
}

View File

@ -43,6 +43,7 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.search.MultiValueMode;
@ -139,12 +140,12 @@ public class GeoDistanceSortParser implements SortParser {
throw new IllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance");
}
FieldMapper mapper = context.smartNameFieldMapper(fieldName);
if (mapper == null) {
MappedFieldType fieldType = context.smartNameFieldType(fieldName);
if (fieldType == null) {
throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort");
}
final MultiValueMode finalSortMode = sortMode; // final reference for use in the anonymous class
final IndexGeoPointFieldData geoIndexFieldData = context.fieldData().getForField(mapper);
final IndexGeoPointFieldData geoIndexFieldData = context.fieldData().getForField(fieldType);
final FixedSourceDistance[] distances = new FixedSourceDistance[geoPoints.size()];
for (int i = 0; i< geoPoints.size(); i++) {
distances[i] = geoDistance.fixedSourceDistance(geoPoints.get(i).lat(), geoPoints.get(i).lon(), unit);

View File

@ -33,9 +33,8 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.search.MultiValueMode;
@ -208,16 +207,16 @@ public class SortParseElement implements SearchParseElement {
sortFields.add(SORT_DOC);
}
} else {
FieldMapper fieldMapper = context.smartNameFieldMapper(fieldName);
if (fieldMapper == null) {
MappedFieldType fieldType = context.smartNameFieldType(fieldName);
if (fieldType == null) {
if (unmappedType != null) {
fieldMapper = context.mapperService().unmappedFieldMapper(unmappedType);
fieldType = context.mapperService().unmappedFieldType(unmappedType);
} else {
throw new SearchParseException(context, "No mapping found for [" + fieldName + "] in order to sort on", null);
}
}
if (!fieldMapper.isSortable()) {
if (!fieldType.isSortable()) {
throw new SearchParseException(context, "Sorting not supported for field[" + fieldName + "]", null);
}
@ -231,7 +230,7 @@ public class SortParseElement implements SearchParseElement {
}*/
// We only support AVG and SUM on number based fields
if (!(fieldMapper instanceof NumberFieldMapper) && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
if (fieldType.isNumeric() == false && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
sortMode = null;
}
if (sortMode == null) {
@ -266,9 +265,9 @@ public class SortParseElement implements SearchParseElement {
nested = null;
}
IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldMapper)
IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldType)
.comparatorSource(missing, sortMode, nested);
sortFields.add(new SortField(fieldMapper.fieldType().names().indexName(), fieldComparatorSource, reverse));
sortFields.add(new SortField(fieldType.names().indexName(), fieldComparatorSource, reverse));
}
}

View File

@ -46,6 +46,7 @@ import org.apache.lucene.util.fst.PairOutputs;
import org.apache.lucene.util.fst.PairOutputs.Pair;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat.CompletionLookupProvider;
import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat.LookupFactory;
@ -75,11 +76,11 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
public static final int CODEC_VERSION_CHECKSUMS = 3;
public static final int CODEC_VERSION_LATEST = CODEC_VERSION_CHECKSUMS;
private boolean preserveSep;
private boolean preservePositionIncrements;
private int maxSurfaceFormsPerAnalyzedForm;
private int maxGraphExpansions;
private boolean hasPayloads;
private final boolean preserveSep;
private final boolean preservePositionIncrements;
private final int maxSurfaceFormsPerAnalyzedForm;
private final int maxGraphExpansions;
private final boolean hasPayloads;
private final XAnalyzingSuggester prototype;
public AnalyzingCompletionLookupProvider(boolean preserveSep, boolean exactFirst, boolean preservePositionIncrements, boolean hasPayloads) {
@ -99,6 +100,18 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
return "analyzing";
}
public boolean getPreserveSep() {
return preserveSep;
}
public boolean getPreservePositionsIncrements() {
return preservePositionIncrements;
}
public boolean hasPayloads() {
return hasPayloads;
}
@Override
public FieldsConsumer consumer(final IndexOutput output) throws IOException {
CodecUtil.writeHeader(output, CODEC_NAME, CODEC_VERSION_LATEST);
@ -252,18 +265,18 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
final long ramBytesUsed = sizeInBytes;
return new LookupFactory() {
@Override
public Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext) {
AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.fieldType().names().indexName());
public Lookup getLookup(CompletionFieldMapper.CompletionFieldType fieldType, CompletionSuggestionContext suggestionContext) {
AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(fieldType.names().indexName());
if (analyzingSuggestHolder == null) {
return null;
}
int flags = analyzingSuggestHolder.getPreserveSeparator() ? XAnalyzingSuggester.PRESERVE_SEP : 0;
final XAnalyzingSuggester suggester;
final Automaton queryPrefix = mapper.requiresContext() ? ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(), suggestionContext.getContextQueries()) : null;
final Automaton queryPrefix = fieldType.requiresContext() ? ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(), suggestionContext.getContextQueries()) : null;
if (suggestionContext.isFuzzy()) {
suggester = new XFuzzySuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags,
suggester = new XFuzzySuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(), flags,
analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions,
suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(),
suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(),
@ -271,7 +284,7 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte,
analyzingSuggestHolder.holeCharacter);
} else {
suggester = new XAnalyzingSuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags,
suggester = new XAnalyzingSuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(), flags,
analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions,
analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads,
analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte,
@ -303,8 +316,8 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
}
@Override
AnalyzingSuggestHolder getAnalyzingSuggestHolder(CompletionFieldMapper mapper) {
return lookupMap.get(mapper.fieldType().names().indexName());
AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType) {
return lookupMap.get(fieldType.names().indexName());
}
@Override

View File

@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
@ -46,6 +45,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.search.suggest.completion.CompletionTokenStream.ToFiniteStrings;
@ -260,7 +260,7 @@ public class Completion090PostingsFormat extends PostingsFormat {
this.lookup = lookup;
}
public Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext) {
public Lookup getLookup(CompletionFieldMapper.CompletionFieldType mapper, CompletionSuggestionContext suggestionContext) {
return lookup.getLookup(mapper, suggestionContext);
}
@ -340,8 +340,8 @@ public class Completion090PostingsFormat extends PostingsFormat {
}
public static abstract class LookupFactory implements Accountable {
public abstract Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext);
public abstract Lookup getLookup(CompletionFieldMapper.CompletionFieldType fieldType, CompletionSuggestionContext suggestionContext);
public abstract CompletionStats stats(String ... fields);
abstract AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder getAnalyzingSuggestHolder(CompletionFieldMapper mapper);
abstract AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType);
}
}

View File

@ -100,16 +100,16 @@ public class CompletionSuggestParser implements SuggestContextParser {
}
}
suggestion.mapper((CompletionFieldMapper)mapperService.smartNameFieldMapper(suggestion.getField()));
suggestion.fieldType((CompletionFieldMapper.CompletionFieldType) mapperService.smartNameFieldType(suggestion.getField()));
CompletionFieldMapper mapper = suggestion.mapper();
if (mapper != null) {
if (mapper.requiresContext()) {
CompletionFieldMapper.CompletionFieldType fieldType = suggestion.fieldType();
if (fieldType != null) {
if (fieldType.requiresContext()) {
if (contextParser == null) {
throw new IllegalArgumentException("suggester [completion] requires context to be setup");
} else {
contextParser.nextToken();
List<ContextQuery> contextQueries = ContextQuery.parseQueries(mapper.getContextMapping(), contextParser);
List<ContextQuery> contextQueries = ContextQuery.parseQueries(fieldType.getContextMapping(), contextParser);
suggestion.setContextQuery(contextQueries);
}
} else if (contextParser != null) {

View File

@ -50,7 +50,7 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
@Override
protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name,
CompletionSuggestionContext suggestionContext, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {
if (suggestionContext.mapper() == null || !(suggestionContext.mapper() instanceof CompletionFieldMapper)) {
if (suggestionContext.fieldType() == null) {
throw new ElasticsearchException("Field [" + suggestionContext.getField() + "] is not a completion suggest field");
}
final IndexReader indexReader = searcher.getIndexReader();
@ -67,7 +67,7 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
Terms terms = atomicReader.fields().terms(fieldName);
if (terms instanceof Completion090PostingsFormat.CompletionTerms) {
final Completion090PostingsFormat.CompletionTerms lookupTerms = (Completion090PostingsFormat.CompletionTerms) terms;
final Lookup lookup = lookupTerms.getLookup(suggestionContext.mapper(), suggestionContext);
final Lookup lookup = lookupTerms.getLookup(suggestionContext.fieldType(), suggestionContext);
if (lookup == null) {
// we don't have a lookup for this segment.. this might be possible if a merge dropped all
// docs from the segment that had a value in this segment.

View File

@ -32,7 +32,7 @@ import java.util.List;
*/
public class CompletionSuggestionContext extends SuggestionSearchContext.SuggestionContext {
private CompletionFieldMapper mapper;
private CompletionFieldMapper.CompletionFieldType fieldType;
private int fuzzyEditDistance = XFuzzySuggester.DEFAULT_MAX_EDITS;
private boolean fuzzyTranspositions = XFuzzySuggester.DEFAULT_TRANSPOSITIONS;
private int fuzzyMinLength = XFuzzySuggester.DEFAULT_MIN_FUZZY_LENGTH;
@ -45,12 +45,12 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest
super(suggester);
}
public CompletionFieldMapper mapper() {
return this.mapper;
public CompletionFieldMapper.CompletionFieldType fieldType() {
return this.fieldType;
}
public void mapper(CompletionFieldMapper mapper) {
this.mapper = mapper;
public void fieldType(CompletionFieldMapper.CompletionFieldType fieldType) {
this.fieldType = fieldType;
}
public void setFuzzyEditDistance(int fuzzyEditDistance) {

View File

@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.script.CompiledScript;
@ -166,15 +167,15 @@ public final class PhraseSuggestParser implements SuggestContextParser {
throw new IllegalArgumentException("The required field option is missing");
}
FieldMapper fieldMapper = mapperService.smartNameFieldMapper(suggestion.getField());
if (fieldMapper == null) {
MappedFieldType fieldType = mapperService.smartNameFieldType(suggestion.getField());
if (fieldType == null) {
throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]");
} else if (suggestion.getAnalyzer() == null) {
// no analyzer name passed in, so try the field's analyzer, or the default analyzer
if (fieldMapper.fieldType().searchAnalyzer() == null) {
if (fieldType.searchAnalyzer() == null) {
suggestion.setAnalyzer(mapperService.searchAnalyzer());
} else {
suggestion.setAnalyzer(fieldMapper.fieldType().searchAnalyzer());
suggestion.setAnalyzer(fieldType.searchAnalyzer());
}
}
@ -324,7 +325,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator)) {
if ("field".equals(fieldName)) {
generator.setField(parser.text());
if (mapperService.smartNameFieldMapper(generator.field()) == null) {
if (mapperService.smartNameFieldType(generator.field()) == null) {
throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
}
} else if ("size".equals(fieldName)) {

View File

@ -29,6 +29,7 @@ import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService;
@ -65,32 +66,32 @@ public abstract class AbstractFieldDataTests extends ElasticsearchSingleNodeTest
}
public <IFD extends IndexFieldData<?>> IFD getForField(FieldDataType type, String fieldName, boolean docValues) {
final FieldMapper mapper;
final MappedFieldType fieldType;
final BuilderContext context = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
if (type.getType().equals("string")) {
mapper = MapperBuilders.stringField(fieldName).tokenized(false).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.stringField(fieldName).tokenized(false).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("float")) {
mapper = MapperBuilders.floatField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.floatField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("double")) {
mapper = MapperBuilders.doubleField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.doubleField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("long")) {
mapper = MapperBuilders.longField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.longField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("int")) {
mapper = MapperBuilders.integerField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.integerField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("short")) {
mapper = MapperBuilders.shortField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.shortField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("byte")) {
mapper = MapperBuilders.byteField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.byteField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("geo_point")) {
mapper = MapperBuilders.geoPointField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.geoPointField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("_parent")) {
mapper = MapperBuilders.parent().type(fieldName).build(context);
fieldType = MapperBuilders.parent().type(fieldName).build(context).fieldType();
} else if (type.getType().equals("binary")) {
mapper = MapperBuilders.binaryField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context);
fieldType = MapperBuilders.binaryField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else {
throw new UnsupportedOperationException(type.getType());
}
return ifdService.getForField(mapper);
return ifdService.getForField(fieldType);
}
@Before

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.plain.*;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.core.*;
@ -51,7 +52,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
final IndexFieldDataService ifdService = indexService.fieldData();
for (boolean docValues : Arrays.asList(true, false)) {
final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
final StringFieldMapper stringMapper = new StringFieldMapper.Builder("string").tokenized(false).docValues(docValues).build(ctx);
final MappedFieldType stringMapper = new StringFieldMapper.Builder("string").tokenized(false).docValues(docValues).build(ctx).fieldType();
ifdService.clear();
IndexFieldData<?> fd = ifdService.getForField(stringMapper);
if (docValues) {
@ -60,11 +61,11 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
assertTrue(fd instanceof PagedBytesIndexFieldData);
}
for (FieldMapper mapper : Arrays.asList(
new ByteFieldMapper.Builder("int").docValues(docValues).build(ctx),
new ShortFieldMapper.Builder("int").docValues(docValues).build(ctx),
new IntegerFieldMapper.Builder("int").docValues(docValues).build(ctx),
new LongFieldMapper.Builder("long").docValues(docValues).build(ctx)
for (MappedFieldType mapper : Arrays.asList(
new ByteFieldMapper.Builder("int").docValues(docValues).build(ctx).fieldType(),
new ShortFieldMapper.Builder("int").docValues(docValues).build(ctx).fieldType(),
new IntegerFieldMapper.Builder("int").docValues(docValues).build(ctx).fieldType(),
new LongFieldMapper.Builder("long").docValues(docValues).build(ctx).fieldType()
)) {
ifdService.clear();
fd = ifdService.getForField(mapper);
@ -75,7 +76,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
}
}
final FloatFieldMapper floatMapper = new FloatFieldMapper.Builder("float").docValues(docValues).build(ctx);
final MappedFieldType floatMapper = new FloatFieldMapper.Builder("float").docValues(docValues).build(ctx).fieldType();
ifdService.clear();
fd = ifdService.getForField(floatMapper);
if (docValues) {
@ -84,7 +85,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
assertTrue(fd instanceof FloatArrayIndexFieldData);
}
final DoubleFieldMapper doubleMapper = new DoubleFieldMapper.Builder("double").docValues(docValues).build(ctx);
final MappedFieldType doubleMapper = new DoubleFieldMapper.Builder("double").docValues(docValues).build(ctx).fieldType();
ifdService.clear();
fd = ifdService.getForField(doubleMapper);
if (docValues) {
@ -100,29 +101,29 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
final IndexService indexService = createIndex("test");
final IndexFieldDataService ifdService = indexService.fieldData();
final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
final StringFieldMapper stringMapper = MapperBuilders.stringField("string").tokenized(false).fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(Settings.builder().put("format", "fst").build()).build(ctx);
final MappedFieldType stringMapper = MapperBuilders.stringField("string").tokenized(false).fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(Settings.builder().put("format", "fst").build()).build(ctx).fieldType();
ifdService.clear();
IndexFieldData<?> fd = ifdService.getForField(stringMapper);
assertTrue(fd instanceof FSTBytesIndexFieldData);
final Settings fdSettings = Settings.builder().put("format", "array").build();
for (FieldMapper mapper : Arrays.asList(
new ByteFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx),
new ShortFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx),
new IntegerFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx),
new LongFieldMapper.Builder("long").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx)
for (MappedFieldType mapper : Arrays.asList(
new ByteFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType(),
new ShortFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType(),
new IntegerFieldMapper.Builder("int").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType(),
new LongFieldMapper.Builder("long").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType()
)) {
ifdService.clear();
fd = ifdService.getForField(mapper);
assertTrue(fd instanceof PackedArrayIndexFieldData);
}
final FloatFieldMapper floatMapper = MapperBuilders.floatField("float").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx);
final MappedFieldType floatMapper = MapperBuilders.floatField("float").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType();
ifdService.clear();
fd = ifdService.getForField(floatMapper);
assertTrue(fd instanceof FloatArrayIndexFieldData);
final DoubleFieldMapper doubleMapper = MapperBuilders.doubleField("double").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx);
final MappedFieldType doubleMapper = MapperBuilders.doubleField("double").fieldDataSettings(DOC_VALUES_SETTINGS).fieldDataSettings(fdSettings).build(ctx).fieldType();
ifdService.clear();
fd = ifdService.getForField(doubleMapper);
assertTrue(fd instanceof DoubleArrayIndexFieldData);
@ -132,7 +133,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
final IndexService indexService = createIndex("test");
final IndexFieldDataService ifdService = indexService.fieldData();
final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1));
final StringFieldMapper mapper1 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx);
final MappedFieldType mapper1 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "paged_bytes").build()).build(ctx).fieldType();
final IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(new KeywordAnalyzer()));
Document doc = new Document();
doc.add(new StringField("s", "thisisastring", Store.NO));
@ -149,7 +150,7 @@ public class IndexFieldDataServiceTests extends ElasticsearchSingleNodeTest {
// write new segment
writer.addDocument(doc);
final IndexReader reader2 = DirectoryReader.open(writer, true);
final StringFieldMapper mapper2 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "fst").build()).build(ctx);
final MappedFieldType mapper2 = MapperBuilders.stringField("s").tokenized(false).fieldDataSettings(Settings.builder().put(FieldDataType.FORMAT_KEY, "fst").build()).build(ctx).fieldType();
ifdService.onMappingUpdate();
ifd = ifdService.getForField(mapper2);
assertThat(ifd, instanceOf(FSTBytesIndexFieldData.class));

View File

@ -165,8 +165,8 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
public void testDynamicMappingOnEmptyString() throws Exception {
IndexService service = createIndex("test");
client().prepareIndex("test", "type").setSource("empty_field", "").get();
FieldMapper mapper = service.mapperService().fullName("empty_field");
assertNotNull(mapper);
MappedFieldType fieldType = service.mapperService().fullName("empty_field");
assertNotNull(fieldType);
}
public void testTypeNotCreatedOnIndexFailure() throws IOException, InterruptedException {

View File

@ -90,7 +90,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
public void testBasicQuerySanities() {
Query childQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
QueryUtils.check(query);
@ -127,7 +127,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
int shortCircuitParentDocSet = random().nextInt(5);
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
ChildrenConstantScoreQuery query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet, null);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());

View File

@ -108,7 +108,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
Query childQuery = new TermQuery(new Term("field", "value"));
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
int minChildren = random().nextInt(10);
int maxChildren = scaledRandomIntBetween(minChildren, 10);

View File

@ -90,7 +90,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
public void testBasicQuerySanities() {
Query parentQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))));
Query query = new ParentConstantScoreQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
QueryUtils.check(query);

View File

@ -94,7 +94,7 @@ public class ParentQueryTests extends AbstractChildTests {
public void testBasicQuerySanities() {
Query parentQuery = new TermQuery(new Term("field", "value"));
ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))));
Query query = new ParentQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
QueryUtils.check(query);

Some files were not shown because too many files have changed in this diff Show More