Merge remote-tracking branch 'es/master' into feature/ingest

Martijn van Groningen 2015-12-23 15:16:20 +01:00
commit 767114adec
202 changed files with 1720 additions and 1670 deletions

View File

@@ -226,7 +226,7 @@ public class MapperQueryParser extends QueryParser {
}
}
if (query == null) {
- query = super.getFieldQuery(currentFieldType.names().indexName(), queryText, quoted);
+ query = super.getFieldQuery(currentFieldType.name(), queryText, quoted);
}
return query;
}
@@ -466,7 +466,7 @@ public class MapperQueryParser extends QueryParser {
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context);
}
if (query == null) {
- query = getPossiblyAnalyzedPrefixQuery(currentFieldType.names().indexName(), termStr);
+ query = getPossiblyAnalyzedPrefixQuery(currentFieldType.name(), termStr);
}
return query;
}
@@ -592,7 +592,7 @@ public class MapperQueryParser extends QueryParser {
if (!settings.forceAnalyzer()) {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
- indexedNameField = currentFieldType.names().indexName();
+ indexedNameField = currentFieldType.name();
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
}
return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr);
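Note: the recurring change in this merge replaces the old MappedFieldType.Names bundle with a single canonical field name. A condensed sketch of the migration, using the calls from the hunks above:

    // before: a field type carried several name variants
    String indexName = fieldType.names().indexName(); // name of the field in the Lucene index
    String fullName = fieldType.names().fullName();   // full dotted path from the mappings
    // after: one canonical name serves both purposes
    String name = fieldType.name();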

View File

@@ -126,13 +126,13 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
if (indexService == null) {
throw new IllegalArgumentException("No index provided, and trying to analyzer based on a specific field which requires the index parameter");
}
- MappedFieldType fieldType = indexService.mapperService().smartNameFieldType(request.field());
+ MappedFieldType fieldType = indexService.mapperService().fullName(request.field());
if (fieldType != null) {
if (fieldType.isNumeric()) {
throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
}
analyzer = fieldType.indexAnalyzer();
- field = fieldType.names().indexName();
+ field = fieldType.name();
}
}
if (field == null) {

View File

@@ -171,7 +171,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
for (String field : request.fields()) {
if (Regex.isMatchAllPattern(field)) {
for (FieldMapper fieldMapper : allFieldMappers) {
- addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
+ addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults());
}
} else if (Regex.isSimpleMatchPattern(field)) {
// go through the field mappers 3 times, to make sure we give preference to the resolve order: full name, index name, name.
@@ -179,15 +179,15 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
Collection<FieldMapper> remainingFieldMappers = newLinkedList(allFieldMappers);
for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper fieldMapper = it.next();
- if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) {
- addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults());
+ if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) {
+ addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults());
it.remove();
}
}
for (Iterator<FieldMapper> it = remainingFieldMappers.iterator(); it.hasNext(); ) {
final FieldMapper fieldMapper = it.next();
- if (Regex.simpleMatch(field, fieldMapper.fieldType().names().indexName())) {
- addFieldMapper(fieldMapper.fieldType().names().indexName(), fieldMapper, fieldMappings, request.includeDefaults());
+ if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) {
+ addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults());
it.remove();
}
}
@@ -214,7 +214,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
builder.startObject();
fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS);
builder.endObject();
- fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().names().fullName(), builder.bytes()));
+ fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), builder.bytes()));
} catch (IOException e) {
throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
}
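Note: after this rename both wildcard passes above resolve against the same fieldType().name() value. Regex.simpleMatch implements the simple '*' wildcard matching used here; a sketch of its behavior (illustrative values, not from this commit):

    Regex.simpleMatch("user.*", "user.name"); // true: '*' matches any suffix
    Regex.simpleMatch("*name", "user.name");  // true: a leading '*' matches any prefix
    Regex.simpleMatch("user", "user.name");   // false: without '*', an exact match is required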

View File

@@ -259,9 +259,8 @@ public class MetaDataMappingService extends AbstractComponent {
} else {
newMapper = indexService.mapperService().parse(request.type(), mappingUpdateSource, existingMapper == null);
if (existingMapper != null) {
- // first, simulate
- // this will just throw exceptions in case of problems
- existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes());
+ // first, simulate: just call merge and ignore the result
+ existingMapper.merge(newMapper.mapping(), request.updateAllTypes());
} else {
// TODO: can we find a better place for this validation?
// The reason this validation is here is that the mapper service doesn't learn about
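Note: the merge API changed alongside this. A dry run is no longer requested through a simulate flag; per the new comment above, calling merge() validates the incoming mapping and throws on conflict, and a simulation simply ignores the returned result. A condensed sketch (names from this hunk; behavior as described by the diff comment):

    // simulate: merge() throws if the mappings are incompatible,
    // and nothing is applied because the result is discarded
    existingMapper.merge(newMapper.mapping(), request.updateAllTypes());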

View File

@@ -1,152 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import java.io.IOException;
abstract class CustomBoostFactorScorer extends Scorer {
final Scorer scorer;
final DocIdSetIterator iterator;
final float maxBoost;
final CombineFunction scoreCombiner;
Float minScore;
NextDoc nextDoc;
CustomBoostFactorScorer(Weight w, Scorer scorer, float maxBoost, CombineFunction scoreCombiner, Float minScore)
throws IOException {
super(w);
if (minScore == null) {
nextDoc = new AnyNextDoc();
} else {
nextDoc = new MinScoreNextDoc();
}
this.scorer = scorer;
this.iterator = scorer.iterator();
this.maxBoost = maxBoost;
this.scoreCombiner = scoreCombiner;
this.minScore = minScore;
}
@Override
public int docID() {
return scorer.docID();
}
@Override
public DocIdSetIterator iterator() {
return new DocIdSetIterator() {
@Override
public int nextDoc() throws IOException {
return nextDoc.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return nextDoc.advance(target);
}
@Override
public long cost() {
return iterator.cost();
}
@Override
public int docID() {
return iterator.docID();
}
};
}
public abstract float innerScore() throws IOException;
@Override
public float score() throws IOException {
return nextDoc.score();
}
@Override
public int freq() throws IOException {
return scorer.freq();
}
public interface NextDoc {
public int advance(int target) throws IOException;
public int nextDoc() throws IOException;
public float score() throws IOException;
}
public class MinScoreNextDoc implements NextDoc {
float currentScore = Float.MAX_VALUE * -1.0f;
@Override
public int nextDoc() throws IOException {
int doc;
do {
doc = iterator.nextDoc();
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
} while (currentScore < minScore);
return doc;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public int advance(int target) throws IOException {
int doc = iterator.advance(target);
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
if (currentScore < minScore) {
return iterator.nextDoc();
}
return doc;
}
}
public class AnyNextDoc implements NextDoc {
@Override
public int nextDoc() throws IOException {
return iterator.nextDoc();
}
@Override
public float score() throws IOException {
return innerScore();
}
@Override
public int advance(int target) throws IOException {
return iterator.advance(target);
}
}
}
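Note: this deleted class enforced min_score eagerly: MinScoreNextDoc scored every candidate inside nextDoc()/advance() and kept advancing until a document cleared the threshold, so the score was always computed up front. Condensed from the removed code above:

    do {
        doc = iterator.nextDoc();
        if (doc == DocIdSetIterator.NO_MORE_DOCS) return doc;
        currentScore = innerScore(); // every candidate is scored immediately
    } while (currentScore < minScore);

Its replacement, MinScoreScorer (added further down), expresses the same cutoff as a TwoPhaseIterator, which lets conjunctions position on the cheap approximation first and score only documents that survive the other clauses.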

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
+ import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
@@ -142,7 +143,7 @@ public class FiltersFunctionScoreQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
- if (needsScores == false) {
+ if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores);
}
@@ -184,11 +185,7 @@ public class FiltersFunctionScoreQuery extends Query {
subQueryWeight.normalize(norm, boost);
}
- @Override
- public Scorer scorer(LeafReaderContext context) throws IOException {
- // we ignore scoreDocsInOrder parameter, because we need to score in
- // order if documents are scored with a script. The
- // ShardLookup depends on in order scoring.
+ private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) {
return null;
@@ -201,15 +198,24 @@ public class FiltersFunctionScoreQuery extends Query {
Scorer filterScorer = filterWeights[i].scorer(context);
docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
}
- return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore, needsScores);
+ return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, needsScores);
}
+ @Override
+ public Scorer scorer(LeafReaderContext context) throws IOException {
+ Scorer scorer = functionScorer(context);
+ if (scorer != null && minScore != null) {
+ scorer = new MinScoreScorer(this, scorer, minScore);
+ }
+ return scorer;
+ }
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
- Explanation subQueryExpl = subQueryWeight.explain(context, doc);
- if (!subQueryExpl.isMatch()) {
- return subQueryExpl;
+ Explanation expl = subQueryWeight.explain(context, doc);
+ if (!expl.isMatch()) {
+ return expl;
}
// First: Gather explanations for all filters
List<Explanation> filterExplanations = new ArrayList<>();
@@ -218,7 +224,7 @@ public class FiltersFunctionScoreQuery extends Query {
filterWeights[i].scorer(context));
if (docSet.get(doc)) {
FilterFunction filterFunction = filterFunctions[i];
- Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
+ Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, expl);
double factor = functionExplanation.getValue();
float sc = CombineFunction.toFloat(factor);
Explanation filterExplanation = Explanation.match(sc, "function score, product of:",
@@ -226,46 +232,52 @@ public class FiltersFunctionScoreQuery extends Query {
filterExplanations.add(filterExplanation);
}
}
- if (filterExplanations.size() == 0) {
- return subQueryExpl;
+ if (filterExplanations.size() > 0) {
+ FiltersFunctionFactorScorer scorer = functionScorer(context);
+ int actualDoc = scorer.iterator().advance(doc);
+ assert (actualDoc == doc);
+ double score = scorer.computeScore(doc, expl.getValue());
+ Explanation factorExplanation = Explanation.match(
+ CombineFunction.toFloat(score),
+ "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
+ filterExplanations);
+ expl = combineFunction.explain(expl, factorExplanation, maxBoost);
}
- FiltersFunctionFactorScorer scorer = (FiltersFunctionFactorScorer)scorer(context);
- int actualDoc = scorer.iterator.advance(doc);
- assert (actualDoc == doc);
- double score = scorer.computeScore(doc, subQueryExpl.getValue());
- Explanation factorExplanation = Explanation.match(
- CombineFunction.toFloat(score),
- "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
- filterExplanations);
- return combineFunction.explain(subQueryExpl, factorExplanation, maxBoost);
+ if (minScore != null && minScore > expl.getValue()) {
+ expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl);
+ }
+ return expl;
}
}
- static class FiltersFunctionFactorScorer extends CustomBoostFactorScorer {
+ static class FiltersFunctionFactorScorer extends FilterScorer {
private final FilterFunction[] filterFunctions;
private final ScoreMode scoreMode;
private final LeafScoreFunction[] functions;
private final Bits[] docSets;
+ private final CombineFunction scoreCombiner;
+ private final float maxBoost;
private final boolean needsScores;
private FiltersFunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, FilterFunction[] filterFunctions,
- float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, Float minScore, boolean needsScores) throws IOException {
- super(w, scorer, maxBoost, scoreCombiner, minScore);
+ float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, boolean needsScores) throws IOException {
+ super(scorer, w);
this.scoreMode = scoreMode;
this.filterFunctions = filterFunctions;
this.functions = functions;
this.docSets = docSets;
+ this.scoreCombiner = scoreCombiner;
+ this.maxBoost = maxBoost;
this.needsScores = needsScores;
}
@Override
- public float innerScore() throws IOException {
- int docId = scorer.docID();
+ public float score() throws IOException {
+ int docId = docID();
// Even if the weight is created with needsScores=false, it might
// be costly to call score(), so we explicitly check if scores
// are needed
- float subQueryScore = needsScores ? scorer.score() : 0f;
+ float subQueryScore = needsScores ? super.score() : 0f;
double factor = computeScore(docId, subQueryScore);
return scoreCombiner.combine(subQueryScore, factor, maxBoost);
}
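Note: FiltersFunctionFactorScorer now extends Lucene's FilterScorer instead of the deleted CustomBoostFactorScorer. FilterScorer forwards docID(), freq() and iteration to the wrapped scorer, so a subclass only overrides what it changes. A minimal illustrative subclass (hypothetical, not from this commit):

    // multiplies the wrapped scorer's score by a constant factor
    final class ConstantFactorScorer extends FilterScorer {
        private final float factor;
        ConstantFactorScorer(Scorer in, Weight weight, float factor) {
            super(in, weight);
            this.factor = factor;
        }
        @Override
        public float score() throws IOException {
            return super.score() * factor; // super.score() delegates to the wrapped scorer
        }
    }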

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
+ import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
@@ -90,7 +91,7 @@ public class FunctionScoreQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
- if (needsScores == false) {
+ if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores);
}
@@ -128,8 +129,7 @@ public class FunctionScoreQuery extends Query {
subQueryWeight.normalize(norm, boost);
}
- @Override
- public Scorer scorer(LeafReaderContext context) throws IOException {
+ private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) {
return null;
@@ -138,7 +138,16 @@ public class FunctionScoreQuery extends Query {
if (function != null) {
leafFunction = function.getLeafScoreFunction(context);
}
- return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, minScore, needsScores);
+ return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, needsScores);
}
+ @Override
+ public Scorer scorer(LeafReaderContext context) throws IOException {
+ Scorer scorer = functionScorer(context);
+ if (scorer != null && minScore != null) {
+ scorer = new MinScoreScorer(this, scorer, minScore);
+ }
+ return scorer;
+ }
@Override
@@ -147,38 +156,47 @@ public class FunctionScoreQuery extends Query {
if (!subQueryExpl.isMatch()) {
return subQueryExpl;
}
+ Explanation expl;
if (function != null) {
Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
- return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost);
+ expl = combineFunction.explain(subQueryExpl, functionExplanation, maxBoost);
} else {
- return subQueryExpl;
+ expl = subQueryExpl;
}
+ if (minScore != null && minScore > expl.getValue()) {
+ expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl);
+ }
+ return expl;
}
}
- static class FunctionFactorScorer extends CustomBoostFactorScorer {
+ static class FunctionFactorScorer extends FilterScorer {
private final LeafScoreFunction function;
private final boolean needsScores;
+ private final CombineFunction scoreCombiner;
+ private final float maxBoost;
- private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, Float minScore, boolean needsScores)
+ private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, boolean needsScores)
throws IOException {
- super(w, scorer, maxBoost, scoreCombiner, minScore);
+ super(scorer, w);
this.function = function;
+ this.scoreCombiner = scoreCombiner;
+ this.maxBoost = maxBoost;
this.needsScores = needsScores;
}
@Override
- public float innerScore() throws IOException {
+ public float score() throws IOException {
// Even if the weight is created with needsScores=false, it might
// be costly to call score(), so we explicitly check if scores
// are needed
- float score = needsScores ? scorer.score() : 0f;
+ float score = needsScores ? super.score() : 0f;
if (function == null) {
return score;
} else {
return scoreCombiner.combine(score,
- function.score(scorer.docID(), score), maxBoost);
+ function.score(docID(), score), maxBoost);
}
}
}
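Note: the same pattern as in FiltersFunctionScoreQuery above: createWeight() may bypass function scoring only when scores are irrelevant and no min_score is set, because a min_score cutoff changes which documents match, not just how they rank. The rule the guard encodes, as a one-line sketch (hypothetical helper):

    // scores can be skipped only if the caller ignores them and they cannot affect matching
    boolean canDelegateToSubQuery(boolean needsScores, Float minScore) {
        return needsScores == false && minScore == null;
    }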

View File

@@ -0,0 +1,95 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search.function;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.ScoreCachingWrappingScorer;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
/** A {@link Scorer} that filters out documents that have a score that is
* lower than a configured constant. */
final class MinScoreScorer extends Scorer {
private final Scorer in;
private final float minScore;
MinScoreScorer(Weight weight, Scorer scorer, float minScore) {
super(weight);
if (scorer instanceof ScoreCachingWrappingScorer == false) {
// when minScore is set, scores might be requested twice: once
// to verify the match, and once by the collector
scorer = new ScoreCachingWrappingScorer(scorer);
}
this.in = scorer;
this.minScore = minScore;
}
public Scorer getScorer() {
return in;
}
@Override
public int docID() {
return in.docID();
}
@Override
public float score() throws IOException {
return in.score();
}
@Override
public int freq() throws IOException {
return in.freq();
}
@Override
public DocIdSetIterator iterator() {
return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
}
@Override
public TwoPhaseIterator twoPhaseIterator() {
final TwoPhaseIterator inTwoPhase = this.in.twoPhaseIterator();
final DocIdSetIterator approximation = inTwoPhase == null ? in.iterator() : inTwoPhase.approximation();
return new TwoPhaseIterator(approximation) {
@Override
public boolean matches() throws IOException {
// we need to check the two-phase iterator first
// otherwise calling score() is illegal
if (inTwoPhase != null && inTwoPhase.matches() == false) {
return false;
}
return in.score() >= minScore;
}
@Override
public float matchCost() {
return 1000f // random constant for the score computation
+ (inTwoPhase == null ? 0 : inTwoPhase.matchCost());
}
};
}
}
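Note: a minimal consumption sketch for the class above (hypothetical harness; MinScoreScorer is package-private, so this mirrors what a Lucene collector does internally):

    TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    DocIdSetIterator approximation = twoPhase.approximation();
    for (int doc = approximation.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approximation.nextDoc()) {
        if (twoPhase.matches()) {         // scores the doc and applies the minScore cutoff
            float score = scorer.score(); // cheap: cached by ScoreCachingWrappingScorer
            // ... collect(doc, score)
        }
    }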

View File

@@ -459,21 +459,21 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
@Override
- public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+ public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
- shard.fieldData().onCache(shardId, fieldNames, fieldDataType, ramUsage);
+ shard.fieldData().onCache(shardId, fieldName, fieldDataType, ramUsage);
}
}
}
@Override
- public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+ public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
- shard.fieldData().onRemoval(shardId, fieldNames, fieldDataType, wasEvicted, sizeInBytes);
+ shard.fieldData().onRemoval(shardId, fieldName, fieldDataType, wasEvicted, sizeInBytes);
}
}
}

View File

@@ -23,36 +23,24 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
- import java.util.AbstractMap;
import java.util.Map;
- import java.util.stream.Stream;
/**
 *
 */
public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
- private final CopyOnWriteHashMap<String, Analyzer> analyzers;
- private final Analyzer defaultAnalyzer;
+ private final Map<String, Analyzer> analyzers;
- public FieldNameAnalyzer(Analyzer defaultAnalyzer) {
- this(new CopyOnWriteHashMap<>(), defaultAnalyzer);
- }
- public FieldNameAnalyzer(Map<String, Analyzer> analyzers, Analyzer defaultAnalyzer) {
+ public FieldNameAnalyzer(Map<String, Analyzer> analyzers) {
super(Analyzer.PER_FIELD_REUSE_STRATEGY);
this.analyzers = CopyOnWriteHashMap.copyOf(analyzers);
- this.defaultAnalyzer = defaultAnalyzer;
}
public Map<String, Analyzer> analyzers() {
return analyzers;
}
- public Analyzer defaultAnalyzer() {
- return defaultAnalyzer;
- }
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
Analyzer analyzer = analyzers.get(fieldName);
@@ -63,18 +51,4 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
// Fields need to be explicitly added
throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer");
}
- /**
- * Return a new instance that contains the union of this and of the provided analyzers.
- */
- public FieldNameAnalyzer copyAndAddAll(Stream<? extends Map.Entry<String, Analyzer>> mappers) {
- CopyOnWriteHashMap<String, Analyzer> result = analyzers.copyAndPutAll(mappers.map((e) -> {
- if (e.getValue() == null) {
- return new AbstractMap.SimpleImmutableEntry<>(e.getKey(), defaultAnalyzer);
- }
- return e;
- }));
- return new FieldNameAnalyzer(result, defaultAnalyzer);
- }
}
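Note: with the default analyzer gone, every analyzed field must be registered explicitly; looking up an unknown field now throws instead of silently falling back. A minimal usage sketch (hypothetical analyzers):

    Map<String, Analyzer> analyzers = new HashMap<>();
    analyzers.put("title", new StandardAnalyzer());
    analyzers.put("body", new StandardAnalyzer());
    FieldNameAnalyzer fieldNameAnalyzer = new FieldNameAnalyzer(analyzers);
    // analyzing a field with no entry now fails with
    // IllegalArgumentException("Field [...] has no associated analyzer")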

View File

@@ -54,7 +54,7 @@ public class PerFieldMappingPostingFormatCodec extends Lucene54Codec {
@Override
public PostingsFormat getPostingsFormatForField(String field) {
- final MappedFieldType indexName = mapperService.indexName(field);
+ final MappedFieldType indexName = mapperService.fullName(field);
if (indexName == null) {
logger.warn("no index mapper found for field: [{}] returning default postings format", field);
} else if (indexName instanceof CompletionFieldMapper.CompletionFieldType) {

View File

@@ -80,7 +80,7 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
/**
 * The field name.
 */
- MappedFieldType.Names getFieldNames();
+ String getFieldName();
/**
* The field data type.

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.Accountable;
- import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.shard.ShardId;
/**
@@ -49,12 +48,12 @@ public interface IndexFieldDataCache {
/**
 * Called after the fielddata is loaded during the cache phase
 */
- void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage);
+ void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage);
/**
 * Called after the fielddata is unloaded
 */
- void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
+ void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes);
}
class None implements IndexFieldDataCache {
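Note: a minimal sketch of the simplified listener contract (hypothetical implementation; the field is now identified by its full name alone):

    IndexFieldDataCache.Listener listener = new IndexFieldDataCache.Listener() {
        @Override
        public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
            System.out.println("loaded fielddata for [" + fieldName + "]: " + ramUsage.ramBytesUsed() + " bytes");
        }
        @Override
        public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
            System.out.println("unloaded fielddata for [" + fieldName + "], evicted: " + wasEvicted);
        }
    };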

View File

@@ -34,7 +34,6 @@ import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
@@ -61,7 +60,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
public static final String FIELDDATA_CACHE_VALUE_NODE = "node";
private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> {
- throw new IllegalStateException("Can't load fielddata on [" + fieldType.names().fullName()
+ throw new IllegalStateException("Can't load fielddata on [" + fieldType.name()
+ "] of index [" + indexProperties.getIndex().getName() + "] because fielddata is unsupported on fields of type ["
+ fieldType.fieldDataType().getType() + "]. Use doc values instead.");
};
@@ -148,11 +147,11 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
private final MapperService mapperService;
private static final IndexFieldDataCache.Listener DEFAULT_NOOP_LISTENER = new IndexFieldDataCache.Listener() {
@Override
- public void onCache(ShardId shardId, Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+ public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
}
@Override
- public void onRemoval(ShardId shardId, Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+ public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
}
};
private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER;
@@ -195,22 +194,22 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
- final Names fieldNames = fieldType.names();
+ final String fieldName = fieldType.name();
final FieldDataType type = fieldType.fieldDataType();
if (type == null) {
throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]");
throw new IllegalArgumentException("found no fielddata type for field [" + fieldName + "]");
}
final boolean docValues = fieldType.hasDocValues();
IndexFieldData.Builder builder = null;
String format = type.getFormat(indexSettings.getSettings());
if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
logger.warn("field [" + fieldNames.fullName() + "] has no doc values, will use default field data format");
logger.warn("field [" + fieldName + "] has no doc values, will use default field data format");
format = null;
}
if (format != null) {
builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
if (builder == null) {
logger.warn("failed to find format [" + format + "] for field [" + fieldNames.fullName() + "], will use default");
logger.warn("failed to find format [" + format + "] for field [" + fieldName + "], will use default");
}
}
if (builder == null && docValues) {
@@ -220,24 +219,24 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
builder = buildersByType.get(type.getType());
}
if (builder == null) {
- throw new IllegalArgumentException("failed to find field data builder for field " + fieldNames.fullName() + ", and type " + type.getType());
+ throw new IllegalArgumentException("failed to find field data builder for field " + fieldName + ", and type " + type.getType());
}
IndexFieldDataCache cache;
synchronized (this) {
- cache = fieldDataCaches.get(fieldNames.indexName());
+ cache = fieldDataCaches.get(fieldName);
if (cache == null) {
// we default to node level cache, which in turn defaults to be unbounded
// this means changing the node level settings is simple, just set the bounds there
String cacheType = type.getSettings().get("cache", indexSettings.getSettings().get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE));
if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
- cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldNames, type);
+ cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type);
} else if ("none".equals(cacheType)){
cache = new IndexFieldDataCache.None();
} else {
throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldNames.fullName() + "]");
throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]");
}
- fieldDataCaches.put(fieldNames.indexName(), cache);
+ fieldDataCaches.put(fieldName, cache);
}
}

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
- import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.shard.ShardId;
import java.util.Map;
@@ -52,16 +51,15 @@ public class ShardFieldData implements IndexFieldDataCache.Listener {
}
@Override
- public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
+ public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
totalMetric.inc(ramUsage.ramBytesUsed());
- String keyFieldName = fieldNames.indexName();
- CounterMetric total = perFieldTotals.get(keyFieldName);
+ CounterMetric total = perFieldTotals.get(fieldName);
if (total != null) {
total.inc(ramUsage.ramBytesUsed());
} else {
total = new CounterMetric();
total.inc(ramUsage.ramBytesUsed());
- CounterMetric prev = perFieldTotals.putIfAbsent(keyFieldName, total);
+ CounterMetric prev = perFieldTotals.putIfAbsent(fieldName, total);
if (prev != null) {
prev.inc(ramUsage.ramBytesUsed());
}
@@ -69,15 +67,14 @@ public class ShardFieldData implements IndexFieldDataCache.Listener {
}
@Override
- public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
+ public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
if (wasEvicted) {
evictionsMetric.inc();
}
if (sizeInBytes != -1) {
totalMetric.dec(sizeInBytes);
- String keyFieldName = fieldNames.indexName();
- CounterMetric total = perFieldTotals.get(keyFieldName);
+ CounterMetric total = perFieldTotals.get(fieldName);
if (total != null) {
total.dec(sizeInBytes);
}

View File

@@ -80,7 +80,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
- assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+ assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed;
final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed);

View File

@@ -65,7 +65,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
- assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+ assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final double dMissingValue = (Double) missingObject(missingValue, reversed);
// NOTE: it's important to pass null as a missing value in the constructor so that

View File

@@ -57,7 +57,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
- assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+ assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final float dMissingValue = (Float) missingObject(missingValue, reversed);
// NOTE: it's important to pass null as a missing value in the constructor so that

View File

@@ -56,7 +56,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
- assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldNames().indexName());
+ assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final Long dMissingValue = (Long) missingObject(missingValue, reversed);
// NOTE: it's important to pass null as a missing value in the constructor so that

View File

@@ -64,12 +64,12 @@ public enum GlobalOrdinalsBuilder {
if (logger.isDebugEnabled()) {
logger.debug(
"Global-ordinals[{}][{}] took {} ms",
- indexFieldData.getFieldNames().fullName(),
+ indexFieldData.getFieldName(),
ordinalMap.getValueCount(),
TimeValue.nsecToMSec(System.nanoTime() - startTimeNS)
);
}
- return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldNames(),
+ return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(),
indexFieldData.getFieldDataType(), atomicFD, ordinalMap, memorySizeInBytes
);
}
@@ -103,7 +103,7 @@ public enum GlobalOrdinalsBuilder {
subs[i] = atomicFD[i].getOrdinalsValues();
}
final OrdinalMap ordinalMap = OrdinalMap.build(null, subs, PackedInts.DEFAULT);
- return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldNames(),
+ return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(),
indexFieldData.getFieldDataType(), atomicFD, ordinalMap, 0
);
}

View File

@@ -40,13 +40,13 @@ import java.util.Collections;
 */
public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable {
- private final MappedFieldType.Names fieldNames;
+ private final String fieldName;
private final FieldDataType fieldDataType;
private final long memorySizeInBytes;
- protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) {
+ protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, long memorySizeInBytes) {
super(indexSettings);
- this.fieldNames = fieldNames;
+ this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.memorySizeInBytes = memorySizeInBytes;
}
@@ -67,8 +67,8 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen
}
@Override
- public MappedFieldType.Names getFieldNames() {
- return fieldNames;
+ public String getFieldName() {
+ return fieldName;
}
@Override

View File

@@ -37,8 +37,8 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
private final Atomic[] atomicReaders;
- InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
- super(indexSettings, fieldNames, fieldDataType, memorySizeInBytes);
+ InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
+ super(indexSettings, fieldName, fieldDataType, memorySizeInBytes);
this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) {
atomicReaders[i] = new Atomic(segmentAfd[i], ordinalMap, i);

View File

@@ -32,7 +32,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.N
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
@@ -41,8 +40,8 @@ import java.io.IOException;
public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {
- AbstractGeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
- super(index, fieldNames, fieldDataType);
+ AbstractGeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
+ super(index, fieldName, fieldDataType);
}
@Override
@@ -56,8 +55,8 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData {
final boolean indexCreatedBefore2x;
- public GeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) {
- super(index, fieldNames, fieldDataType);
+ public GeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) {
+ super(index, fieldName, fieldDataType);
this.indexCreatedBefore2x = indexCreatedBefore2x;
}
@@ -65,9 +64,9 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
public AtomicGeoPointFieldData load(LeafReaderContext context) {
try {
if (indexCreatedBefore2x) {
- return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName()));
+ return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName));
}
- return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldNames.indexName()));
+ return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
@@ -84,7 +83,7 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
- return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.names(), fieldType.fieldDataType(),
+ return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(), fieldType.fieldDataType(),
indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
}
}

View File

@@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.RamAccountingTermsEnum;
- import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
@@ -39,20 +38,20 @@ import java.io.IOException;
 */
public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends AbstractIndexComponent implements IndexFieldData<FD> {
- private final MappedFieldType.Names fieldNames;
+ private final String fieldName;
protected final FieldDataType fieldDataType;
protected final IndexFieldDataCache cache;
- public AbstractIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
+ public AbstractIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
super(indexSettings);
- this.fieldNames = fieldNames;
+ this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.cache = cache;
}
@Override
- public MappedFieldType.Names getFieldNames() {
- return this.fieldNames;
+ public String getFieldName() {
+ return this.fieldName;
}
@Override
@@ -62,12 +61,12 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
@Override
public void clear() {
- cache.clear(fieldNames.indexName());
+ cache.clear(fieldName);
}
@Override
public FD load(LeafReaderContext context) {
- if (context.reader().getFieldInfos().fieldInfo(fieldNames.indexName()) == null) {
+ if (context.reader().getFieldInfos().fieldInfo(fieldName) == null) {
// Some leaf readers may be wrapped and report different set of fields and use the same cache key.
// If a field can't be found then it doesn't mean it isn't there,
// so if a field doesn't exist then we don't cache it and just return an empty field data instance.

View File

@@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
@@ -92,8 +91,8 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
}
}
- public AbstractIndexGeoPointFieldData(IndexSettings indexSettings, Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
- super(indexSettings, fieldNames, fieldDataType, cache);
+ public AbstractIndexGeoPointFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
+ super(indexSettings, fieldName, fieldDataType, cache);
}
@Override

View File

@@ -37,7 +37,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
@@ -52,9 +51,9 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD
protected Settings regex;
protected final CircuitBreakerService breakerService;
- protected AbstractIndexOrdinalsFieldData(IndexSettings indexSettings, Names fieldNames, FieldDataType fieldDataType,
+ protected AbstractIndexOrdinalsFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType,
IndexFieldDataCache cache, CircuitBreakerService breakerService) {
- super(indexSettings, fieldNames, fieldDataType, cache);
+ super(indexSettings, fieldName, fieldDataType, cache);
final Map<String, Settings> groups = fieldDataType.getSettings().getGroups("filter");
frequency = groups.get("frequency");
regex = groups.get("regex");
@@ -74,7 +73,7 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD
}
boolean fieldFound = false;
for (LeafReaderContext context : indexReader.leaves()) {
- if (context.reader().getFieldInfos().fieldInfo(getFieldNames().indexName()) != null) {
+ if (context.reader().getFieldInfos().fieldInfo(getFieldName()) != null) {
fieldFound = true;
break;
}

View File

@@ -25,18 +25,17 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<BinaryDVAtomicFieldData> {
- public BinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
- super(index, fieldNames, fieldDataType);
+ public BinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
+ super(index, fieldName, fieldDataType);
}
@Override
public BinaryDVAtomicFieldData load(LeafReaderContext context) {
- return new BinaryDVAtomicFieldData(context.reader(), fieldNames.indexName());
+ return new BinaryDVAtomicFieldData(context.reader(), fieldName);
}
@Override

View File

@@ -29,7 +29,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.MappedFieldType;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
@@ -38,8 +37,8 @@ import java.io.IOException;
public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<BytesBinaryDVAtomicFieldData> {
- public BytesBinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
- super(index, fieldNames, fieldDataType);
+ public BytesBinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
+ super(index, fieldName, fieldDataType);
}
@Override
@@ -50,7 +49,7 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
@Override
public BytesBinaryDVAtomicFieldData load(LeafReaderContext context) {
try {
- return new BytesBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName()));
+ return new BytesBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
@@ -67,8 +66,8 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
- final Names fieldNames = fieldType.names();
- return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType());
+ final String fieldName = fieldType.name();
+ return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType());
}
}

View File

@@ -27,7 +27,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.MappedFieldType;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
@@ -43,12 +42,12 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicF
public IndexFieldData<AtomicFieldData> build(IndexSettings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
- return new DisabledIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache);
+ return new DisabledIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache);
}
}
- public DisabledIndexFieldData(IndexSettings indexSettings, Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
- super(indexSettings, fieldNames, fieldDataType, cache);
+ public DisabledIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
+ super(indexSettings, fieldName, fieldDataType, cache);
}
@Override
@@ -67,7 +66,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData<AtomicF
}
private IllegalStateException fail() {
- return new IllegalStateException("Field data loading is forbidden on " + getFieldNames().fullName());
+ return new IllegalStateException("Field data loading is forbidden on " + getFieldName());
}
}

View File

@@ -30,7 +30,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.mapper.MappedFieldType;
- import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -46,20 +45,20 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet;
public abstract class DocValuesIndexFieldData {
protected final Index index;
- protected final Names fieldNames;
+ protected final String fieldName;
protected final FieldDataType fieldDataType;
protected final ESLogger logger;
- public DocValuesIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
+ public DocValuesIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
super();
this.index = index;
- this.fieldNames = fieldNames;
+ this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.logger = Loggers.getLogger(getClass());
}
- public final Names getFieldNames() {
- return fieldNames;
+ public final String getFieldName() {
+ return fieldName;
}
public final FieldDataType getFieldDataType() {
@@ -92,20 +91,20 @@ public abstract class DocValuesIndexFieldData {
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
- final Names fieldNames = fieldType.names();
+ final String fieldName = fieldType.name();
final Settings fdSettings = fieldType.fieldDataType().getSettings();
final Map<String, Settings> filter = fdSettings.getGroups("filter");
if (filter != null && !filter.isEmpty()) {
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]");
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldName + "]");
}
- if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) {
+ if (BINARY_INDEX_FIELD_NAMES.contains(fieldName)) {
assert numericType == null;
- return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType());
+ return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType());
} else if (numericType != null) {
- return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldNames, numericType, fieldType.fieldDataType());
+ return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldName, numericType, fieldType.fieldDataType());
} else {
- return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldNames, breakerService, fieldType.fieldDataType());
+ return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldName, breakerService, fieldType.fieldDataType());
}
}

View File

@@ -54,17 +54,17 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
- return new GeoPointArrayIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
+ return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache,
breakerService, fieldType.fieldDataType().getSettings()
.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).before(Version.V_2_2_0) ||
indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
}
}
- public GeoPointArrayIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames,
+ public GeoPointArrayIndexFieldData(IndexSettings indexSettings, String fieldName,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService,
final boolean indexCreatedBefore22) {
- super(indexSettings, fieldNames, fieldDataType, cache);
+ super(indexSettings, fieldName, fieldDataType, cache);
this.breakerService = breakerService;
this.indexCreatedBefore22 = indexCreatedBefore22;
}
@@ -73,7 +73,7 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
LeafReader reader = context.reader();
- Terms terms = reader.terms(getFieldNames().indexName());
+ Terms terms = reader.terms(getFieldName());
AtomicGeoPointFieldData data = null;
// TODO: Use an actual estimator to estimate before loading.
NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA));

View File

@@ -46,7 +46,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
- return new IndexIndexFieldData(indexSettings, fieldType.names());
+ return new IndexIndexFieldData(indexSettings, fieldType.name());
}
}
@@ -100,8 +100,8 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
private final AtomicOrdinalsFieldData atomicFieldData;
- private IndexIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names names) {
- super(indexSettings, names, new FieldDataType("string"), null, null);
+ private IndexIndexFieldData(IndexSettings indexSettings, String name) {
+ super(indexSettings, name, new FieldDataType("string"), null, null);
atomicFieldData = new IndexAtomicFieldData(index().name());
}

View File

@@ -57,13 +57,13 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
@Override
public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
- return new PagedBytesIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
+ return new PagedBytesIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache, breakerService);
}
}
- public PagedBytesIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames,
+ public PagedBytesIndexFieldData(IndexSettings indexSettings, String fieldName,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
- super(indexSettings, fieldNames, fieldDataType, cache, breakerService);
+ super(indexSettings, fieldName, fieldDataType, cache, breakerService);
}
@Override
@@ -71,8 +71,8 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
LeafReader reader = context.reader();
AtomicOrdinalsFieldData data = null;
- PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldNames().fullName());
- Terms terms = reader.terms(getFieldNames().indexName());
+ PagedBytesEstimator estimator = new PagedBytesEstimator(context, breakerService.getBreaker(CircuitBreaker.FIELDDATA), getFieldName());
+ Terms terms = reader.terms(getFieldName());
if (terms == null) {
data = AbstractAtomicOrdinalsFieldData.empty();
estimator.afterLoad(null, data.ramBytesUsed());
@ -167,10 +167,10 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
public long estimateStringFieldData() {
try {
LeafReader reader = context.reader();
Terms terms = reader.terms(getFieldNames().indexName());
Terms terms = reader.terms(getFieldName());
Fields fields = reader.fields();
final Terms fieldTerms = fields.terms(getFieldNames().indexName());
final Terms fieldTerms = fields.terms(getFieldName());
if (fieldTerms instanceof FieldReader) {
final Stats stats = ((FieldReader) fieldTerms).getStats();


@ -48,7 +48,6 @@ import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@ -75,10 +74,10 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
private final Set<String> parentTypes;
private final CircuitBreakerService breakerService;
public ParentChildIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames,
public ParentChildIndexFieldData(IndexSettings indexSettings, String fieldName,
FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
CircuitBreakerService breakerService) {
super(indexSettings, fieldNames, fieldDataType, cache);
super(indexSettings, fieldName, fieldDataType, cache);
this.breakerService = breakerService;
Set<String> parentTypes = new HashSet<>();
for (DocumentMapper mapper : mapperService.docMappers(false)) {
@ -147,7 +146,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService,
MapperService mapperService) {
return new ParentChildIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
return new ParentChildIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache,
mapperService, breakerService);
}
}
@ -319,8 +318,8 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
}
@Override
public Names getFieldNames() {
return ParentChildIndexFieldData.this.getFieldNames();
public String getFieldName() {
return ParentChildIndexFieldData.this.getFieldName();
}
@Override


@ -38,7 +38,6 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
@ -52,7 +51,7 @@ import java.util.Collections;
public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData {
private final NumericType numericType;
public SortedNumericDVIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) {
public SortedNumericDVIndexFieldData(Index index, String fieldNames, NumericType numericType, FieldDataType fieldDataType) {
super(index, fieldNames, fieldDataType);
if (numericType == null) {
throw new IllegalArgumentException("numericType must be non-null");
@ -86,7 +85,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
@Override
public AtomicNumericFieldData load(LeafReaderContext context) {
final LeafReader reader = context.reader();
final String field = fieldNames.indexName();
final String field = fieldName;
switch (numericType) {
case FLOAT:


@ -31,9 +31,8 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import java.io.IOException;
@ -43,8 +42,8 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i
private final IndexFieldDataCache cache;
private final CircuitBreakerService breakerService;
public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, Names fieldNames, CircuitBreakerService breakerService, FieldDataType fieldDataType) {
super(indexSettings.getIndex(), fieldNames, fieldDataType);
public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, CircuitBreakerService breakerService, FieldDataType fieldDataType) {
super(indexSettings.getIndex(), fieldName, fieldDataType);
this.indexSettings = indexSettings;
this.cache = cache;
this.breakerService = breakerService;
@ -57,7 +56,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i
@Override
public AtomicOrdinalsFieldData load(LeafReaderContext context) {
return new SortedSetDVBytesAtomicFieldData(context.reader(), fieldNames.indexName());
return new SortedSetDVBytesAtomicFieldData(context.reader(), fieldName);
}
@Override
@ -73,7 +72,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i
}
boolean fieldFound = false;
for (LeafReaderContext context : indexReader.leaves()) {
if (context.reader().getFieldInfos().fieldInfo(getFieldNames().indexName()) != null) {
if (context.reader().getFieldInfos().fieldInfo(getFieldName()) != null) {
fieldFound = true;
break;
}


@ -94,7 +94,7 @@ public class FieldsVisitor extends StoredFieldVisitor {
}
// can't derive exact mapping type
for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
MappedFieldType fieldType = mapperService.indexName(entry.getKey());
MappedFieldType fieldType = mapperService.fullName(entry.getKey());
if (fieldType == null) {
continue;
}
@ -112,7 +112,7 @@ public class FieldsVisitor extends StoredFieldVisitor {
if (fieldMapper == null) {
// it's possible index name doesn't match field name (legacy feature)
for (FieldMapper mapper : documentMapper.mappers()) {
if (mapper.fieldType().names().indexName().equals(indexName)) {
if (mapper.fieldType().name().equals(indexName)) {
fieldMapper = mapper;
break;
}


@ -69,7 +69,7 @@ public class SingleFieldsVisitor extends FieldsVisitor {
if (fieldsValues == null) {
return;
}
List<Object> fieldValues = fieldsValues.get(fieldType.names().indexName());
List<Object> fieldValues = fieldsValues.get(fieldType.name());
if (fieldValues == null) {
return;
}


@ -20,15 +20,15 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
@ -37,44 +37,38 @@ import java.util.Set;
public final class DocumentFieldMappers implements Iterable<FieldMapper> {
/** Full field name to mapper */
private final CopyOnWriteHashMap<String, FieldMapper> fieldMappers;
private final Map<String, FieldMapper> fieldMappers;
private final FieldNameAnalyzer indexAnalyzer;
private final FieldNameAnalyzer searchAnalyzer;
private final FieldNameAnalyzer searchQuoteAnalyzer;
public DocumentFieldMappers(AnalysisService analysisService) {
this(new CopyOnWriteHashMap<String, FieldMapper>(),
new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
}
private DocumentFieldMappers(CopyOnWriteHashMap<String, FieldMapper> fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
this.fieldMappers = fieldMappers;
this.indexAnalyzer = indexAnalyzer;
this.searchAnalyzer = searchAnalyzer;
this.searchQuoteAnalyzer = searchQuoteAnalyzer;
}
public DocumentFieldMappers copyAndAllAll(Collection<FieldMapper> newMappers) {
CopyOnWriteHashMap<String, FieldMapper> map = this.fieldMappers;
for (FieldMapper fieldMapper : newMappers) {
map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper);
private static void put(Map<String, Analyzer> analyzers, String key, Analyzer value, Analyzer defaultValue) {
if (value == null) {
value = defaultValue;
}
FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer())
));
FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer())
));
FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(newMappers.stream().map((input) ->
new AbstractMap.SimpleImmutableEntry<>(input.fieldType().names().indexName(), (Analyzer) input.fieldType().searchQuoteAnalyzer())
));
return new DocumentFieldMappers(map,indexAnalyzer,searchAnalyzer,searchQuoteAnalyzer);
analyzers.put(key, value);
}
/** Returns the mapper for the given field */
public DocumentFieldMappers(Collection<FieldMapper> mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) {
Map<String, FieldMapper> fieldMappers = new HashMap<>();
Map<String, Analyzer> indexAnalyzers = new HashMap<>();
Map<String, Analyzer> searchAnalyzers = new HashMap<>();
Map<String, Analyzer> searchQuoteAnalyzers = new HashMap<>();
for (FieldMapper mapper : mappers) {
fieldMappers.put(mapper.name(), mapper);
MappedFieldType fieldType = mapper.fieldType();
put(indexAnalyzers, fieldType.name(), fieldType.indexAnalyzer(), defaultIndex);
put(searchAnalyzers, fieldType.name(), fieldType.searchAnalyzer(), defaultSearch);
put(searchQuoteAnalyzers, fieldType.name(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote);
}
this.fieldMappers = Collections.unmodifiableMap(fieldMappers);
this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers);
this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers);
this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers);
}
/** Returns the mapper for the given field */
public FieldMapper getMapper(String field) {
return fieldMappers.get(field);
}
@ -82,10 +76,10 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
public Collection<String> simpleMatchToFullName(String pattern) {
Set<String> fields = new HashSet<>();
for (FieldMapper fieldMapper : this) {
if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
fields.add(fieldMapper.fieldType().names().fullName());
} else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
fields.add(fieldMapper.fieldType().names().fullName());
if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) {
fields.add(fieldMapper.fieldType().name());
}
}
return fields;
@ -97,7 +91,7 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
return fieldMapper;
}
for (FieldMapper otherFieldMapper : this) {
if (otherFieldMapper.fieldType().names().indexName().equals(name)) {
if (otherFieldMapper.fieldType().name().equals(name)) {
return otherFieldMapper;
}
}
@ -112,14 +106,6 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
return this.indexAnalyzer;
}
/**
* A smart analyzer used for indexing that takes into account specific analyzers configured
* per {@link FieldMapper} with a custom default analyzer for no explicit field analyzer.
*/
public Analyzer indexAnalyzer(Analyzer defaultAnalyzer) {
return new FieldNameAnalyzer(indexAnalyzer.analyzers(), defaultAnalyzer);
}
/**
* A smart analyzer used for searching that takes into account specific analyzers configured
* per {@link FieldMapper}.
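
The rewritten DocumentFieldMappers constructor above eagerly builds one analyzer map per phase (index, search, search-quote), falling back to a default analyzer whenever a mapper does not configure its own; the private put helper encodes exactly that fallback. A self-contained sketch of the pattern, using plain strings in place of Lucene Analyzer instances:

import java.util.HashMap;
import java.util.Map;

class AnalyzerMapSketch {
    // Mirrors the put(...) helper: use the default when no analyzer is set.
    static void put(Map<String, String> analyzers, String field, String value, String defaultValue) {
        analyzers.put(field, value == null ? defaultValue : value);
    }

    public static void main(String[] args) {
        Map<String, String> indexAnalyzers = new HashMap<>();
        put(indexAnalyzers, "title", "english", "standard");
        put(indexAnalyzers, "id", null, "standard"); // falls back to default
        System.out.println(indexAnalyzers); // e.g. {id=standard, title=english}
    }
}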


@ -24,16 +24,15 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
@ -51,15 +50,12 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static java.util.Collections.emptyMap;
@ -72,16 +68,14 @@ public class DocumentMapper implements ToXContent {
private Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
private final Settings indexSettings;
private final RootObjectMapper rootObjectMapper;
private Map<String, Object> meta = emptyMap();
private final Mapper.BuilderContext builderContext;
public Builder(Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) {
this.indexSettings = indexSettings;
public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
final Settings indexSettings = mapperService.getIndexSettings().getSettings();
this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
this.rootObjectMapper = builder.build(builderContext);
@ -104,9 +98,14 @@ public class DocumentMapper implements ToXContent {
return this;
}
public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) {
public DocumentMapper build(MapperService mapperService) {
Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, metadataMappers, mapperService.mappingLock);
Mapping mapping = new Mapping(
mapperService.getIndexSettings().getIndexVersionCreated(),
rootObjectMapper,
metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
meta);
return new DocumentMapper(mapperService, mapping);
}
}
@ -115,38 +114,25 @@ public class DocumentMapper implements ToXContent {
private final String type;
private final Text typeText;
private volatile CompressedXContent mappingSource;
private final CompressedXContent mappingSource;
private volatile Mapping mapping;
private final Mapping mapping;
private final DocumentParser documentParser;
private volatile DocumentFieldMappers fieldMappers;
private final DocumentFieldMappers fieldMappers;
private volatile Map<String, ObjectMapper> objectMappers = Collections.emptyMap();
private final Map<String, ObjectMapper> objectMappers;
private boolean hasNestedObjects = false;
private final boolean hasNestedObjects;
private final ReleasableLock mappingWriteLock;
private final ReentrantReadWriteLock mappingLock;
public DocumentMapper(MapperService mapperService, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser,
RootObjectMapper rootObjectMapper,
Map<String, Object> meta,
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers,
ReentrantReadWriteLock mappingLock) {
public DocumentMapper(MapperService mapperService, Mapping mapping) {
this.mapperService = mapperService;
this.type = rootObjectMapper.name();
this.type = mapping.root().name();
this.typeText = new Text(this.type);
this.mapping = new Mapping(
Version.indexCreated(indexSettings),
rootObjectMapper,
metadataMappers.values().toArray(new MetadataFieldMapper[metadataMappers.values().size()]),
meta);
this.documentParser = new DocumentParser(indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock()));
this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
this.mappingLock = mappingLock;
final IndexSettings indexSettings = mapperService.getIndexSettings();
this.mapping = mapping;
this.documentParser = new DocumentParser(indexSettings, mapperService.documentMapperParser(), this);
if (metadataMapper(ParentFieldMapper.class).active()) {
// mark the routing field mapper as required
@ -163,7 +149,11 @@ public class DocumentMapper implements ToXContent {
}
MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);
this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(newFieldMappers);
final AnalysisService analysisService = mapperService.analysisService();
this.fieldMappers = new DocumentFieldMappers(newFieldMappers,
analysisService.defaultIndexAnalyzer(),
analysisService.defaultSearchAnalyzer(),
analysisService.defaultSearchQuoteAnalyzer());
Map<String, ObjectMapper> builder = new HashMap<>();
for (ObjectMapper objectMapper : newObjectMappers) {
@ -173,14 +163,20 @@ public class DocumentMapper implements ToXContent {
}
}
boolean hasNestedObjects = false;
this.objectMappers = Collections.unmodifiableMap(builder);
for (ObjectMapper objectMapper : newObjectMappers) {
if (objectMapper.nested().isNested()) {
hasNestedObjects = true;
}
}
this.hasNestedObjects = hasNestedObjects;
refreshSource();
try {
mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
} catch (Exception e) {
throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
}
}
public Mapping mapping() {
@ -334,46 +330,17 @@ public class DocumentMapper implements ToXContent {
return mapperService.getParentTypes().contains(type);
}
private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
assert mappingLock.isWriteLockedByCurrentThread();
// update mappers for this document type
Map<String, ObjectMapper> builder = new HashMap<>(this.objectMappers);
for (ObjectMapper objectMapper : objectMappers) {
builder.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNestedObjects = true;
}
}
this.objectMappers = Collections.unmodifiableMap(builder);
this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
// finally update for the entire index
mapperService.addMappers(type, objectMappers, fieldMappers);
public DocumentMapper merge(Mapping mapping, boolean updateAllTypes) {
Mapping merged = this.mapping.merge(mapping, updateAllTypes);
return new DocumentMapper(mapperService, merged);
}
public void merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
mapperService.checkMappersCompatibility(type, mapping, updateAllTypes);
// do the merge even if simulate == false so that we get exceptions
Mapping merged = this.mapping.merge(mapping, updateAllTypes);
if (simulate == false) {
this.mapping = merged;
Collection<ObjectMapper> objectMappers = new ArrayList<>();
Collection<FieldMapper> fieldMappers = new ArrayList<>(Arrays.asList(merged.metadataMappers));
MapperUtils.collect(merged.root, objectMappers, fieldMappers);
addMappers(objectMappers, fieldMappers, updateAllTypes);
refreshSource();
}
}
}
private void refreshSource() throws ElasticsearchGenerationException {
try {
mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
} catch (Exception e) {
throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
}
/**
* Recursively update sub field types.
*/
public DocumentMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
Mapping updated = this.mapping.updateFieldType(fullNameToFieldType);
return new DocumentMapper(mapperService, updated);
}
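
The net effect of the DocumentMapper changes is that the class stops being a mutable, lock-guarded object: mapping, fieldMappers, objectMappers, and mappingSource all become final, and both merge and updateFieldType return a fresh instance instead of swapping state under a write lock. A reduced sketch of that copy-on-merge style (the String field stands in for the real Mapping):

// Sketch: merging never mutates; it returns a new immutable instance.
final class ImmutableMapperSketch {
    private final String mapping; // stand-in for the real Mapping object

    ImmutableMapperSketch(String mapping) {
        this.mapping = mapping;
    }

    // Readers keep using the old instance until the caller publishes
    // the merged one, so no read/write locking is required.
    ImmutableMapperSketch merge(String other) {
        return new ImmutableMapperSketch(this.mapping + "," + other);
    }
}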
public void close() {


@ -27,7 +27,6 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
@ -46,7 +45,6 @@ import static org.elasticsearch.index.mapper.MapperBuilders.doc;
public class DocumentMapperParser {
private final Settings indexSettings;
final MapperService mapperService;
final AnalysisService analysisService;
private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class);
@ -62,8 +60,7 @@ public class DocumentMapperParser {
public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, AnalysisService analysisService,
SimilarityService similarityService, MapperRegistry mapperRegistry) {
this.indexSettings = indexSettings.getSettings();
this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings);
this.parseFieldMatcher = new ParseFieldMatcher(indexSettings.getSettings());
this.mapperService = mapperService;
this.analysisService = analysisService;
this.similarityService = similarityService;
@ -110,7 +107,7 @@ public class DocumentMapperParser {
Mapper.TypeParser.ParserContext parserContext = parserContext(type);
// parse RootObjectMapper
DocumentMapper.Builder docBuilder = doc(indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService);
DocumentMapper.Builder docBuilder = doc((RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService);
Iterator<Map.Entry<String, Object>> iterator = mapping.entrySet().iterator();
// parse DocumentMapper
while(iterator.hasNext()) {
@ -137,7 +134,7 @@ public class DocumentMapperParser {
checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");
return docBuilder.build(mapperService, this);
return docBuilder.build(mapperService);
}
public static void checkNoRemainingFields(String fieldName, Map<String, Object> fieldNodeMap, Version indexVersionCreated) {


@ -26,10 +26,9 @@ import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
@ -53,29 +52,21 @@ class DocumentParser implements Closeable {
private CloseableThreadLocal<ParseContext.InternalParseContext> cache = new CloseableThreadLocal<ParseContext.InternalParseContext>() {
@Override
protected ParseContext.InternalParseContext initialValue() {
return new ParseContext.InternalParseContext(indexSettings, docMapperParser, docMapper, new ContentPath(0));
return new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, new ContentPath(0));
}
};
private final Settings indexSettings;
private final IndexSettings indexSettings;
private final DocumentMapperParser docMapperParser;
private final DocumentMapper docMapper;
private final ReleasableLock parseLock;
public DocumentParser(Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ReleasableLock parseLock) {
public DocumentParser(IndexSettings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper) {
this.indexSettings = indexSettings;
this.docMapperParser = docMapperParser;
this.docMapper = docMapper;
this.parseLock = parseLock;
}
public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
try (ReleasableLock lock = parseLock.acquire()){
return innerParseDocument(source);
}
}
private ParsedDocument innerParseDocument(SourceToParse source) throws MapperParsingException {
if (docMapper.type().equals(MapperService.DEFAULT_MAPPING)) {
throw new IllegalArgumentException("It is forbidden to index into the default mapping [" + MapperService.DEFAULT_MAPPING + "]");
}
@ -132,7 +123,7 @@ class DocumentParser implements Closeable {
// try to parse the next token, this should be null if the object is ended properly
// but will throw a JSON exception if the extra tokens are not valid JSON (this will be handled by the catch)
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0_beta1)
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)
&& source.parser() == null && parser != null) {
// only check for end of tokens if we created the parser here
token = parser.nextToken();
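
Because mappers are now immutable, the read lock that parseDocument used to take around innerParseDocument disappears; the per-thread parse context kept in the CloseableThreadLocal above already keeps parser state thread-confined. A reduced sketch of that per-thread-context pattern using a plain ThreadLocal:

// Sketch: a reusable per-thread context replaces a shared parse lock.
class ParserSketch {
    private final ThreadLocal<StringBuilder> contexts =
            ThreadLocal.withInitial(StringBuilder::new);

    String parse(String source) {
        StringBuilder context = contexts.get(); // no locking required
        context.setLength(0);                   // reset the reused buffer
        return context.append("parsed:").append(source).toString();
    }
}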


@ -44,6 +44,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.StreamSupport;
public abstract class FieldMapper extends Mapper implements Cloneable {
@ -216,31 +217,12 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return builder;
}
protected MappedFieldType.Names buildNames(BuilderContext context) {
return new MappedFieldType.Names(buildIndexName(context), buildIndexNameClean(context), buildFullName(context));
}
protected String buildIndexName(BuilderContext context) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
return buildFullName(context);
}
String actualIndexName = indexName == null ? name : indexName;
return context.path().pathAsText(actualIndexName);
}
protected String buildIndexNameClean(BuilderContext context) {
if (context.indexCreatedVersion().onOrAfter(Version.V_2_0_0_beta1)) {
return buildFullName(context);
}
return indexName == null ? name : indexName;
}
protected String buildFullName(BuilderContext context) {
return context.path().pathAsText(name);
}
protected void setupFieldType(BuilderContext context) {
fieldType.setNames(buildNames(context));
fieldType.setName(buildFullName(context));
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
@ -267,7 +249,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
}
}
protected MappedFieldTypeReference fieldTypeRef;
protected MappedFieldType fieldType;
protected final MappedFieldType defaultFieldType;
protected MultiFields multiFields;
protected CopyTo copyTo;
@ -277,7 +259,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
super(simpleName);
assert indexSettings != null;
this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0_beta1);
this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // the reference ctor freezes the field type
fieldType.freeze();
this.fieldType = fieldType;
defaultFieldType.freeze();
this.defaultFieldType = defaultFieldType;
this.multiFields = multiFields;
@ -286,27 +269,11 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
@Override
public String name() {
return fieldType().names().fullName();
return fieldType().name();
}
public MappedFieldType fieldType() {
return fieldTypeRef.get();
}
/** Returns a reference to the MappedFieldType for this mapper. */
public MappedFieldTypeReference fieldTypeReference() {
return fieldTypeRef;
}
/**
* Updates the reference to this field's MappedFieldType.
* Implementations should assert equality of the underlying field type
*/
public void setFieldTypeReference(MappedFieldTypeReference ref) {
if (ref.get().equals(fieldType()) == false) {
throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
}
this.fieldTypeRef = ref;
return fieldType;
}
/**
@ -332,7 +299,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
}
multiFields.parse(this, context);
return null;
@ -350,10 +317,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return false;
}
@Override
public Iterator<Mapper> iterator() {
if (multiFields == null) {
return Collections.emptyIterator();
}
return multiFields.iterator();
}
@ -383,18 +348,32 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
if (mergeWith instanceof FieldMapper) {
mergedType = ((FieldMapper) mergeWith).contentType();
}
throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
}
FieldMapper fieldMergeWith = (FieldMapper) mergeWith;
multiFields = multiFields.merge(fieldMergeWith.multiFields);
// apply changeable values
MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
fieldType.freeze();
fieldTypeRef.set(fieldType);
this.fieldType = fieldMergeWith.fieldType;
this.copyTo = fieldMergeWith.copyTo;
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
final MappedFieldType newFieldType = fullNameToFieldType.get(fieldType.name());
if (newFieldType == null) {
throw new IllegalStateException();
}
MultiFields updatedMultiFields = multiFields.updateFieldType(fullNameToFieldType);
if (fieldType == newFieldType && multiFields == updatedMultiFields) {
return this; // no change
}
FieldMapper updated = clone();
updated.fieldType = newFieldType;
updated.multiFields = updatedMultiFields;
return updated;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(simpleName());
@ -406,9 +385,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
if (indexCreatedBefore2x && (includeDefaults || !simpleName().equals(fieldType().names().originalIndexName()))) {
builder.field("index_name", fieldType().names().originalIndexName());
}
if (includeDefaults || fieldType().boost() != 1.0f) {
builder.field("boost", fieldType().boost());
@ -619,6 +595,27 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return new MultiFields(mappers);
}
public MultiFields updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
ImmutableOpenMap.Builder<String, FieldMapper> newMappersBuilder = null;
for (ObjectCursor<FieldMapper> cursor : mappers.values()) {
FieldMapper updated = cursor.value.updateFieldType(fullNameToFieldType);
if (updated != cursor.value) {
if (newMappersBuilder == null) {
newMappersBuilder = ImmutableOpenMap.builder(mappers);
}
newMappersBuilder.put(updated.simpleName(), updated);
}
}
if (newMappersBuilder == null) {
return this;
}
ImmutableOpenMap<String, FieldMapper> mappers = newMappersBuilder.build();
return new MultiFields(mappers);
}
public Iterator<Mapper> iterator() {
return StreamSupport.stream(mappers.values().spliterator(), false).map((p) -> (Mapper)p.value).iterator();
}
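
updateFieldType in FieldMapper and MultiFields shares one copy-on-write idiom: look up the current field type by full name, return this when nothing changed, otherwise clone and swap in the new references. A generic sketch of the idiom (the types here are stand-ins, not the real mapper classes):

import java.util.Map;

// Sketch: copy-on-write update driven by a full-name lookup map.
final class CowSketch implements Cloneable {
    String name;
    String fieldType; // stand-in for MappedFieldType

    CowSketch update(Map<String, String> fullNameToFieldType) {
        String updated = fullNameToFieldType.get(name);
        if (updated == null) {
            throw new IllegalStateException("no field type for [" + name + "]");
        }
        if (updated.equals(fieldType)) {
            return this; // unchanged: keep the existing instance
        }
        try {
            CowSketch copy = (CowSketch) clone();
            copy.fieldType = updated;
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }
}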


@ -37,34 +37,22 @@ import java.util.Set;
class FieldTypeLookup implements Iterable<MappedFieldType> {
/** Full field name to field type */
private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;
final CopyOnWriteHashMap<String, MappedFieldType> fullNameToFieldType;
/** Full field name to types containing a mapping for this full name. */
private final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
/** Index field name to field type */
private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;
/** Index field name to types containing a mapping for this index name. */
private final CopyOnWriteHashMap<String, Set<String>> indexNameToTypes;
final CopyOnWriteHashMap<String, Set<String>> fullNameToTypes;
/** Create a new empty instance. */
public FieldTypeLookup() {
fullNameToFieldType = new CopyOnWriteHashMap<>();
fullNameToTypes = new CopyOnWriteHashMap<>();
indexNameToFieldType = new CopyOnWriteHashMap<>();
indexNameToTypes = new CopyOnWriteHashMap<>();
}
private FieldTypeLookup(
CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName,
CopyOnWriteHashMap<String, Set<String>> fullNameToTypes,
CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName,
CopyOnWriteHashMap<String, Set<String>> indexNameToTypes) {
CopyOnWriteHashMap<String, MappedFieldType> fullName,
CopyOnWriteHashMap<String, Set<String>> fullNameToTypes) {
this.fullNameToFieldType = fullName;
this.fullNameToTypes = fullNameToTypes;
this.indexNameToFieldType = indexName;
this.indexNameToTypes = indexNameToTypes;
}
private static CopyOnWriteHashMap<String, Set<String>> addType(CopyOnWriteHashMap<String, Set<String>> map, String key, String type) {
@ -89,47 +77,29 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
* from the provided fields. If a field already exists, the field type will be updated
* to use the new mapper's field type.
*/
public FieldTypeLookup copyAndAddAll(String type, Collection<FieldMapper> newFieldMappers) {
public FieldTypeLookup copyAndAddAll(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
Objects.requireNonNull(type, "type must not be null");
if (MapperService.DEFAULT_MAPPING.equals(type)) {
throw new IllegalArgumentException("Default mappings should not be added to the lookup");
}
CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;
CopyOnWriteHashMap<String, Set<String>> indexNameToTypes = this.indexNameToTypes;
for (FieldMapper fieldMapper : newFieldMappers) {
CopyOnWriteHashMap<String, MappedFieldType> fullName = this.fullNameToFieldType;
CopyOnWriteHashMap<String, Set<String>> fullNameToTypes = this.fullNameToTypes;
for (FieldMapper fieldMapper : fieldMappers) {
MappedFieldType fieldType = fieldMapper.fieldType();
MappedFieldTypeReference fullNameRef = fullName.get(fieldType.names().fullName());
MappedFieldTypeReference indexNameRef = indexName.get(fieldType.names().indexName());
if (fullNameRef == null && indexNameRef == null) {
// new field, just use the ref from this field mapper
fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldTypeReference());
indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldTypeReference());
} else if (fullNameRef == null) {
// this index name already exists, so copy over the reference
fullName = fullName.copyAndPut(fieldType.names().fullName(), indexNameRef);
indexNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
fieldMapper.setFieldTypeReference(indexNameRef);
} else if (indexNameRef == null) {
// this full name already exists, so copy over the reference
indexName = indexName.copyAndPut(fieldType.names().indexName(), fullNameRef);
fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
fieldMapper.setFieldTypeReference(fullNameRef);
} else if (fullNameRef == indexNameRef) {
// the field already exists, so replace the reference in this mapper with the pre-existing one
fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
fieldMapper.setFieldTypeReference(fullNameRef);
} else {
// this new field bridges between two existing field names (a full and index name), which we cannot support
throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
MappedFieldType fullNameFieldType = fullName.get(fieldType.name());
// is the update even legal?
checkCompatibility(type, fieldMapper, updateAllTypes);
if (fieldType != fullNameFieldType) {
fullName = fullName.copyAndPut(fieldType.name(), fieldMapper.fieldType());
}
fullNameToTypes = addType(fullNameToTypes, fieldType.names().fullName(), type);
indexNameToTypes = addType(indexNameToTypes, fieldType.names().indexName(), type);
fullNameToTypes = addType(fullNameToTypes, fieldType.name(), type);
}
return new FieldTypeLookup(fullName, fullNameToTypes, indexName, indexNameToTypes);
return new FieldTypeLookup(fullName, fullNameToTypes);
}
private static boolean beStrict(String type, Set<String> types, boolean updateAllTypes) {
@ -145,42 +115,26 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
}
/**
* Checks if the given mappers' field types are compatible with existing field types.
* If any are not compatible, an IllegalArgumentException is thrown.
* Checks if the given field type is compatible with an existing field type.
* An IllegalArgumentException is thrown in case of incompatibility.
* If updateAllTypes is true, only basic compatibility is checked.
*/
public void checkCompatibility(String type, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
for (FieldMapper fieldMapper : fieldMappers) {
MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
if (ref != null) {
List<String> conflicts = new ArrayList<>();
final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().names().fullName());
boolean strict = beStrict(type, types, updateAllTypes);
ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
if (conflicts.isEmpty() == false) {
throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
}
}
// field type for the index name must be compatible too
MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
if (indexNameRef != null) {
List<String> conflicts = new ArrayList<>();
final Set<String> types = indexNameToTypes.get(fieldMapper.fieldType().names().indexName());
boolean strict = beStrict(type, types, updateAllTypes);
indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
if (conflicts.isEmpty() == false) {
throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
}
private void checkCompatibility(String type, FieldMapper fieldMapper, boolean updateAllTypes) {
MappedFieldType fieldType = fullNameToFieldType.get(fieldMapper.fieldType().name());
if (fieldType != null) {
List<String> conflicts = new ArrayList<>();
final Set<String> types = fullNameToTypes.get(fieldMapper.fieldType().name());
boolean strict = beStrict(type, types, updateAllTypes);
fieldType.checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
if (conflicts.isEmpty() == false) {
throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().name() + "] conflicts with existing mapping in other types:\n" + conflicts.toString());
}
}
}
/** Returns the field type for the given field name. */
public MappedFieldType get(String field) {
MappedFieldTypeReference ref = fullNameToFieldType.get(field);
if (ref == null) return null;
return ref.get();
return fullNameToFieldType.get(field);
}
/** Get the set of types that have a mapping for the given field. */
@ -192,53 +146,23 @@ class FieldTypeLookup implements Iterable<MappedFieldType> {
return types;
}
/** Returns the field type for the given index name */
public MappedFieldType getByIndexName(String field) {
MappedFieldTypeReference ref = indexNameToFieldType.get(field);
if (ref == null) return null;
return ref.get();
}
/** Get the set of types that have a mapping for the given field. */
public Set<String> getTypesByIndexName(String field) {
Set<String> types = indexNameToTypes.get(field);
if (types == null) {
types = Collections.emptySet();
}
return types;
}
/**
* Returns a list of the index names of a simple match regex like pattern against full name and index name.
*/
public Collection<String> simpleMatchToIndexNames(String pattern) {
Set<String> fields = new HashSet<>();
for (MappedFieldType fieldType : this) {
if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
fields.add(fieldType.names().indexName());
} else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
fields.add(fieldType.names().indexName());
}
}
return fields;
}
/**
* Returns a list of the full names matching a simple regex like pattern against the full name.
*/
public Collection<String> simpleMatchToFullName(String pattern) {
Set<String> fields = new HashSet<>();
for (MappedFieldType fieldType : this) {
if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
fields.add(fieldType.names().fullName());
} else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
fields.add(fieldType.names().fullName());
if (Regex.simpleMatch(pattern, fieldType.name())) {
fields.add(fieldType.name());
}
}
return fields;
}
@Override
public Iterator<MappedFieldType> iterator() {
return fullNameToFieldType.values().stream().map((p) -> p.get()).iterator();
return fullNameToFieldType.values().iterator();
}
}
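
With index names gone, FieldTypeLookup needs only two maps: full name to field type, and full name to the set of types mapping that name (used by beStrict to decide how strict compatibility checking should be). A small self-contained sketch of the add-then-check flow, with compatibility reduced to a simple equality test:

import java.util.HashMap;
import java.util.Map;

// Sketch: a single map keyed by full field name; updates are checked first.
class LookupSketch {
    private final Map<String, String> fullNameToFieldType = new HashMap<>();

    void addOrUpdate(String fullName, String fieldType) {
        String existing = fullNameToFieldType.get(fullName);
        if (existing != null && !existing.equals(fieldType)) {
            // stands in for checkCompatibility(...) collecting conflicts
            throw new IllegalArgumentException(
                    "Mapper for [" + fullName + "] conflicts with existing mapping");
        }
        fullNameToFieldType.put(fullName, fieldType);
    }

    String get(String fullName) {
        return fullNameToFieldType.get(fullName);
    }
}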


@ -53,68 +53,6 @@ import java.util.Objects;
*/
public abstract class MappedFieldType extends FieldType {
public static class Names {
private final String indexName;
private final String originalIndexName;
private final String fullName;
public Names(String name) {
this(name, name, name);
}
public Names(String indexName, String originalIndexName, String fullName) {
this.indexName = indexName;
this.originalIndexName = originalIndexName;
this.fullName = fullName;
}
/**
* The indexed name of the field. This is the name under which we will
* store it in the index.
*/
public String indexName() {
return indexName;
}
/**
* The original index name, before any "path" modifications performed on it.
*/
public String originalIndexName() {
return originalIndexName;
}
/**
* The full name, including dot path.
*/
public String fullName() {
return fullName;
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
Names names = (Names) o;
if (!fullName.equals(names.fullName)) return false;
if (!indexName.equals(names.indexName)) return false;
if (!originalIndexName.equals(names.originalIndexName)) return false;
return true;
}
@Override
public int hashCode() {
int result = indexName.hashCode();
result = 31 * result + originalIndexName.hashCode();
result = 31 * result + fullName.hashCode();
return result;
}
}
public enum Loading {
LAZY {
@Override
@ -155,7 +93,7 @@ public abstract class MappedFieldType extends FieldType {
}
}
private Names names;
private String name;
private float boost;
// TODO: remove this docvalues flag and use docValuesType
private boolean docValues;
@ -170,7 +108,7 @@ public abstract class MappedFieldType extends FieldType {
protected MappedFieldType(MappedFieldType ref) {
super(ref);
this.names = ref.names();
this.name = ref.name();
this.boost = ref.boost();
this.docValues = ref.hasDocValues();
this.indexAnalyzer = ref.indexAnalyzer();
@ -214,7 +152,7 @@ public abstract class MappedFieldType extends FieldType {
return boost == fieldType.boost &&
docValues == fieldType.docValues &&
Objects.equals(names, fieldType.names) &&
Objects.equals(name, fieldType.name) &&
Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
@ -226,7 +164,7 @@ public abstract class MappedFieldType extends FieldType {
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString);
}
@ -238,7 +176,7 @@ public abstract class MappedFieldType extends FieldType {
/** Checks this type is the same type as other. Adds a conflict if they are different. */
private final void checkTypeName(MappedFieldType other) {
if (typeName().equals(other.typeName()) == false) {
throw new IllegalArgumentException("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
throw new IllegalArgumentException("mapper [" + name + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
} else if (getClass() != other.getClass()) {
throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName());
}
@ -256,71 +194,68 @@ public abstract class MappedFieldType extends FieldType {
boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
// TODO: should be validating if index options go "up" (but "down" is ok)
if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) {
conflicts.add("mapper [" + names().fullName() + "] has different [index] values");
conflicts.add("mapper [" + name() + "] has different [index] values");
}
if (stored() != other.stored()) {
conflicts.add("mapper [" + names().fullName() + "] has different [store] values");
conflicts.add("mapper [" + name() + "] has different [store] values");
}
if (hasDocValues() == false && other.hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
// when the doc_values field data format is configured
conflicts.add("mapper [" + names().fullName() + "] has different [doc_values] values, cannot change from disabled to enabled");
conflicts.add("mapper [" + name() + "] has different [doc_values] values, cannot change from disabled to enabled");
}
if (omitNorms() && !other.omitNorms()) {
conflicts.add("mapper [" + names().fullName() + "] has different [omit_norms] values, cannot change from disable to enabled");
conflicts.add("mapper [" + name() + "] has different [omit_norms] values, cannot change from disable to enabled");
}
if (storeTermVectors() != other.storeTermVectors()) {
conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector] values");
conflicts.add("mapper [" + name() + "] has different [store_term_vector] values");
}
if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) {
conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_offsets] values");
conflicts.add("mapper [" + name() + "] has different [store_term_vector_offsets] values");
}
if (storeTermVectorPositions() != other.storeTermVectorPositions()) {
conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_positions] values");
conflicts.add("mapper [" + name() + "] has different [store_term_vector_positions] values");
}
if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) {
conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_payloads] values");
conflicts.add("mapper [" + name() + "] has different [store_term_vector_payloads] values");
}
// null and "default"-named index analyzers both mean the default is used
if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) {
if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
conflicts.add("mapper [" + name() + "] has different [analyzer]");
}
} else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) {
conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
conflicts.add("mapper [" + name() + "] has different [analyzer]");
} else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
conflicts.add("mapper [" + name() + "] has different [analyzer]");
}
if (!names().indexName().equals(other.names().indexName())) {
conflicts.add("mapper [" + names().fullName() + "] has different [index_name]");
}
if (Objects.equals(similarity(), other.similarity()) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different [similarity]");
conflicts.add("mapper [" + name() + "] has different [similarity]");
}
if (strict) {
if (omitNorms() != other.omitNorms()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types.");
}
if (boost() != other.boost()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
}
if (normsLoading() != other.normsLoading()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types.");
}
if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
}
if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types.");
}
if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
}
if (Objects.equals(nullValue(), other.nullValue()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types.");
}
}
}
@ -333,13 +268,13 @@ public abstract class MappedFieldType extends FieldType {
return true;
}
public Names names() {
return names;
public String name() {
return name;
}
public void setNames(Names names) {
public void setName(String name) {
checkIfFrozen();
this.names = names;
this.name = name;
}
public float boost() {
@ -456,7 +391,7 @@ public abstract class MappedFieldType extends FieldType {
/** Creates a term associated with the field of this mapper for the given value */
protected Term createTerm(Object value) {
return new Term(names().indexName(), indexedValueForSearch(value));
return new Term(name(), indexedValueForSearch(value));
}
public Query termQuery(Object value, @Nullable QueryShardContext context) {
@ -468,11 +403,11 @@ public abstract class MappedFieldType extends FieldType {
for (int i = 0; i < bytesRefs.length; i++) {
bytesRefs[i] = indexedValueForSearch(values.get(i));
}
return new TermsQuery(names.indexName(), bytesRefs);
return new TermsQuery(name(), bytesRefs);
}
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return new TermRangeQuery(names().indexName(),
return new TermRangeQuery(name(),
lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
upperTerm == null ? null : indexedValueForSearch(upperTerm),
includeLower, includeUpper);
@ -492,7 +427,7 @@ public abstract class MappedFieldType extends FieldType {
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
if (numericType() != null) {
throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + names.fullName + "]");
throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + name + "]");
}
RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
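
Every query factory in MappedFieldType now builds Lucene terms against the single name() string; the old indexName()/originalIndexName()/fullName() distinctions only mattered for pre-2.0 indices, where index_name could diverge from the dotted path. A toy sketch of the collapsed state, including the freeze guard the real class applies before mutation (names are illustrative):

// Sketch: one name plus a freeze guard, replacing the Names triple.
final class FieldTypeNameSketch {
    private String name;
    private boolean frozen;

    void setName(String name) {
        if (frozen) {
            throw new IllegalStateException("field type is frozen");
        }
        this.name = name;
    }

    String name() {
        return name;
    }

    void freeze() {
        frozen = true; // after this, the field type is immutable
    }
}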


@ -1,41 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
/**
* A container for a {@link MappedFieldType} which can be updated and is reference counted.
*/
public class MappedFieldTypeReference {
private MappedFieldType fieldType; // the current field type this reference points to
public MappedFieldTypeReference(MappedFieldType fieldType) {
fieldType.freeze(); // ensure frozen
this.fieldType = fieldType;
}
public MappedFieldType get() {
return fieldType;
}
public void set(MappedFieldType fieldType) {
fieldType.freeze(); // ensure frozen
this.fieldType = fieldType;
}
}


@ -177,4 +177,11 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
/** Return the merge of {@code mergeWith} into this.
* Both {@code this} and {@code mergeWith} will be left unmodified. */
public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes);
/**
* Update the field type of this mapper. This is necessary because some mapping updates
* can modify mappings across several types. This method must return a copy of the mapper
* so that the current mapper is not modified.
*/
public abstract Mapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType);
}
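For a typical leaf field mapper this contract reduces to looking up the type registered under the mapper's full name and copying only when it changed; a minimal sketch, assuming a clone()-based copy (not necessarily the exact implementation in every subclass):
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
    MappedFieldType newFieldType = fullNameToFieldType.get(fieldType().name());
    if (newFieldType == fieldType()) {
        return this; // unchanged, keep the current (frozen) instance
    }
    FieldMapper updated = clone(); // copy, so the current mapper is left unmodified
    updated.fieldType = newFieldType;
    return updated;
}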

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.core.ByteFieldMapper;
@ -41,8 +40,8 @@ public final class MapperBuilders {
private MapperBuilders() {}
public static DocumentMapper.Builder doc(Settings settings, RootObjectMapper.Builder objectBuilder, MapperService mapperService) {
return new DocumentMapper.Builder(settings, objectBuilder, mapperService);
public static DocumentMapper.Builder doc(RootObjectMapper.Builder objectBuilder, MapperService mapperService) {
return new DocumentMapper.Builder(objectBuilder, mapperService);
}
public static RootObjectMapper.Builder rootObject(String name) {

View File

@ -35,11 +35,9 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
@ -65,7 +63,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import java.util.stream.Collectors;
@ -98,12 +95,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
private volatile Map<String, DocumentMapper> mappers = emptyMap();
// A lock for mappings: modifications (put mapping) need to be performed
// under the write lock and read operations (document parsing) need to be
// performed under the read lock
final ReentrantReadWriteLock mappingLock = new ReentrantReadWriteLock();
private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
private volatile FieldTypeLookup fieldTypes;
private volatile Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>();
private boolean hasNested = false; // updated dynamically to true when a nested object is added
@ -216,7 +207,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
DocumentMapper mapper = documentParser.parse(type, mappingSource);
// still add it as a document mapper so we have it registered and, for example, persisted back into
// the cluster meta data if needed, or checked for existence
try (ReleasableLock lock = mappingWriteLock.acquire()) {
synchronized (this) {
mappers = newMapBuilder(mappers).put(type, mapper).map();
}
try {
@ -226,7 +217,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
return mapper;
} else {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
synchronized (this) {
// only apply the default mapping if we don't have the type yet
applyDefault &= mappers.containsKey(type) == false;
return merge(parse(type, mappingSource, applyDefault), updateAllTypes);
@ -234,9 +225,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
// never expose this to the outside world, we need to reparse the doc mapper so we get fresh
// instances of field mappers to properly remove existing doc mapper
private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
private synchronized DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
if (mapper.type().length() == 0) {
throw new InvalidTypeNameException("mapping type name is empty");
}
@ -262,34 +251,89 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
logger.warn("Type [{}] starts with a '.', it is recommended not to start a type name with a '.'", mapper.type());
}
}
// we can add new field/object mappers while the old ones are there
// since we get new instances of those, and when we remove, we remove
// by instance equality
// 1. compute the merged DocumentMapper
DocumentMapper oldMapper = mappers.get(mapper.type());
DocumentMapper newMapper;
if (oldMapper != null) {
oldMapper.merge(mapper.mapping(), false, updateAllTypes);
return oldMapper;
newMapper = oldMapper.merge(mapper.mapping(), updateAllTypes);
} else {
Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> newMappers = checkMappersCompatibility(
mapper.type(), mapper.mapping(), updateAllTypes);
Collection<ObjectMapper> newObjectMappers = newMappers.v1();
Collection<FieldMapper> newFieldMappers = newMappers.v2();
addMappers(mapper.type(), newObjectMappers, newFieldMappers);
newMapper = mapper;
}
// 2. check basic sanity of the new mapping
List<ObjectMapper> objectMappers = new ArrayList<>();
List<FieldMapper> fieldMappers = new ArrayList<>();
Collections.addAll(fieldMappers, newMapper.mapping().metadataMappers);
MapperUtils.collect(newMapper.mapping().root(), objectMappers, fieldMappers);
checkFieldUniqueness(newMapper.type(), objectMappers, fieldMappers);
checkObjectsCompatibility(newMapper.type(), objectMappers, fieldMappers, updateAllTypes);
// 3. update lookup data-structures
// this will in particular make sure that the merged fields are compatible with other types
FieldTypeLookup fieldTypes = this.fieldTypes.copyAndAddAll(newMapper.type(), fieldMappers, updateAllTypes);
boolean hasNested = this.hasNested;
Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers);
for (ObjectMapper objectMapper : objectMappers) {
fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNested = true;
}
}
fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers);
Set<String> parentTypes = this.parentTypes;
if (oldMapper == null && newMapper.parentFieldMapper().active()) {
parentTypes = new HashSet<>(parentTypes.size() + 1);
parentTypes.addAll(this.parentTypes);
parentTypes.add(mapper.parentFieldMapper().type());
parentTypes = Collections.unmodifiableSet(parentTypes);
}
Map<String, DocumentMapper> mappers = new HashMap<>(this.mappers);
mappers.put(newMapper.type(), newMapper);
for (Map.Entry<String, DocumentMapper> entry : mappers.entrySet()) {
if (entry.getKey().equals(DEFAULT_MAPPING)) {
continue;
}
DocumentMapper m = entry.getValue();
// propagate the updated field types back into each document mapper
m = m.updateFieldType(fieldTypes.fullNameToFieldType);
entry.setValue(m);
}
mappers = Collections.unmodifiableMap(mappers);
// 4. commit the change
this.mappers = mappers;
this.fieldTypes = fieldTypes;
this.hasNested = hasNested;
this.fullPathObjectMappers = fullPathObjectMappers;
this.parentTypes = parentTypes;
// 5. send notifications about the change
if (oldMapper == null) {
// means the mapping was created
for (DocumentTypeListener typeListener : typeListeners) {
typeListener.beforeCreate(mapper);
}
mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map();
if (mapper.parentFieldMapper().active()) {
Set<String> newParentTypes = new HashSet<>(parentTypes.size() + 1);
newParentTypes.addAll(parentTypes);
newParentTypes.add(mapper.parentFieldMapper().type());
parentTypes = unmodifiableSet(newParentTypes);
}
assert assertSerialization(mapper);
return mapper;
}
assert assertSerialization(newMapper);
assert assertMappersShareSameFieldType();
return newMapper;
}
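In miniature, steps 3 and 4 form a copy-on-write publish: every lookup structure is rebuilt as a fresh immutable copy and then swapped in with a plain volatile write, so readers (document parsing) never take a lock and never observe a half-applied update. Illustrative sketch only, not part of the commit:
synchronized (this) {                                     // writers serialize here
    Map<String, DocumentMapper> copy = new HashMap<>(this.mappers);
    copy.put(merged.type(), merged);                      // mutate the copy only
    this.mappers = Collections.unmodifiableMap(copy);     // single volatile publish
}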
private boolean assertMappersShareSameFieldType() {
for (DocumentMapper mapper : docMappers(false)) {
List<FieldMapper> fieldMappers = new ArrayList<>();
Collections.addAll(fieldMappers, mapper.mapping().metadataMappers);
MapperUtils.collect(mapper.root(), new ArrayList<ObjectMapper>(), fieldMappers);
for (FieldMapper fieldMapper : fieldMappers) {
assert fieldMapper.fieldType() == fieldTypes.get(fieldMapper.name()) : fieldMapper.name();
}
}
return true;
}
private boolean typeNameStartsWithIllegalDot(DocumentMapper mapper) {
@ -339,8 +383,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
protected void checkMappersCompatibility(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
assert mappingLock.isWriteLockedByCurrentThread();
private void checkObjectsCompatibility(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
assert Thread.holdsLock(this);
checkFieldUniqueness(type, objectMappers, fieldMappers);
@ -358,31 +402,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
throw new IllegalArgumentException("Field [" + fieldMapper.name() + "] is defined as a field in mapping [" + type + "] but this name is already used for an object in other types");
}
}
fieldTypes.checkCompatibility(type, fieldMappers, updateAllTypes);
}
protected Tuple<Collection<ObjectMapper>, Collection<FieldMapper>> checkMappersCompatibility(
String type, Mapping mapping, boolean updateAllTypes) {
List<ObjectMapper> objectMappers = new ArrayList<>();
List<FieldMapper> fieldMappers = new ArrayList<>();
Collections.addAll(fieldMappers, mapping.metadataMappers);
MapperUtils.collect(mapping.root, objectMappers, fieldMappers);
checkMappersCompatibility(type, objectMappers, fieldMappers, updateAllTypes);
return new Tuple<>(objectMappers, fieldMappers);
}
protected void addMappers(String type, Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
assert mappingLock.isWriteLockedByCurrentThread();
Map<String, ObjectMapper> fullPathObjectMappers = new HashMap<>(this.fullPathObjectMappers);
for (ObjectMapper objectMapper : objectMappers) {
fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNested = true;
}
}
this.fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers);
this.fieldTypes = this.fieldTypes.copyAndAddAll(type, fieldMappers);
}
public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
@ -532,15 +551,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
}
/**
* Returns a {@link MappedFieldType} which has the given index name.
*
* If multiple types have fields with the same index name, the first is returned.
*/
public MappedFieldType indexName(String indexName) {
return fieldTypes.getByIndexName(indexName);
}
/**
* Returns the {@link MappedFieldType} for the given fullName.
*
@ -559,32 +569,13 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
// no wildcards
return Collections.singletonList(pattern);
}
return fieldTypes.simpleMatchToIndexNames(pattern);
return fieldTypes.simpleMatchToFullName(pattern);
}
// TODO: remove this since the underlying index names are now the same across all types
public Collection<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
return simpleMatchToIndexNames(pattern);
}
// TODO: remove types param, since the object mapper must be the same across all types
public ObjectMapper getObjectMapper(String name, @Nullable String[] types) {
public ObjectMapper getObjectMapper(String name) {
return fullPathObjectMappers.get(name);
}
public MappedFieldType smartNameFieldType(String smartName) {
MappedFieldType fieldType = fullName(smartName);
if (fieldType != null) {
return fieldType;
}
return indexName(smartName);
}
// TODO: remove this since the underlying index names are now the same across all types
public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) {
return smartNameFieldType(smartName);
}
/**
* Given a type (e.g. long, string, ...), return an anonymous field mapper that can be used for search operations.
*/
@ -678,7 +669,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
MappedFieldType fieldType = smartNameFieldType(fieldName);
MappedFieldType fieldType = fullName(fieldName);
if (fieldType != null) {
Analyzer analyzer = extractAnalyzer.apply(fieldType);
if (analyzer != null) {

View File

@ -93,7 +93,7 @@ public final class Mapping implements ToXContent {
return (T) metadataMappersMap.get(clazz);
}
/** @see DocumentMapper#merge(Mapping, boolean, boolean) */
/** @see DocumentMapper#merge(Mapping, boolean) */
public Mapping merge(Mapping mergeWith, boolean updateAllTypes) {
RootObjectMapper mergedRoot = root.merge(mergeWith.root, updateAllTypes);
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> mergedMetaDataMappers = new HashMap<>(metadataMappersMap);
@ -110,6 +110,18 @@ public final class Mapping implements ToXContent {
return new Mapping(indexCreated, mergedRoot, mergedMetaDataMappers.values().toArray(new MetadataFieldMapper[0]), mergeWith.meta);
}
/**
* Recursively update sub field types.
*/
public Mapping updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
final MetadataFieldMapper[] updatedMeta = Arrays.copyOf(metadataMappers, metadataMappers.length);
for (int i = 0; i < updatedMeta.length; ++i) {
updatedMeta[i] = (MetadataFieldMapper) updatedMeta[i].updateFieldType(fullNameToFieldType);
}
RootObjectMapper updatedRoot = root.updateFieldType(fullNameToFieldType);
return new Mapping(indexCreated, updatedRoot, updatedMeta, meta);
}
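// Call chain, for orientation: MapperService.merge -> DocumentMapper.updateFieldType
// -> this method -> RootObjectMapper/MetadataFieldMapper.updateFieldType, each level
// returning a copy so that frozen mappers are never mutated in place.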
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
root.toXContent(builder, params, new ToXContent() {

View File

@ -170,14 +170,14 @@ public class BinaryFieldMapper extends FieldMapper {
return;
}
if (fieldType().stored()) {
fields.add(new Field(fieldType().names().indexName(), value, fieldType()));
fields.add(new Field(fieldType().name(), value, fieldType()));
}
if (fieldType().hasDocValues()) {
CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName());
CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().name());
if (field == null) {
field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value);
context.doc().addWithKey(fieldType().names().indexName(), field);
field = new CustomBinaryDocValuesField(fieldType().name(), value);
context.doc().addWithKey(fieldType().name(), field);
} else {
field.add(value);
}

View File

@ -222,9 +222,9 @@ public class BooleanFieldMapper extends FieldMapper {
if (value == null) {
return;
}
fields.add(new Field(fieldType().names().indexName(), value ? "T" : "F", fieldType()));
fields.add(new Field(fieldType().name(), value ? "T" : "F", fieldType()));
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value ? 1 : 0));
fields.add(new SortedNumericDocValuesField(fieldType().name(), value ? 1 : 0));
}
}

View File

@ -161,7 +161,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : (int)parseValue(lowerTerm),
upperTerm == null ? null : (int)parseValue(upperTerm),
includeLower, includeUpper);
@ -171,7 +171,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
byte iValue = parseValue(value);
byte iSim = fuzziness.asByte();
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -238,7 +238,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).byteValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), Byte.toString(value), boost);
context.allEntries().addText(fieldType().name(), Byte.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -249,7 +249,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -278,7 +278,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
} else {
value = (byte) parser.shortValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
context.allEntries().addText(fieldType().name(), parser.text(), boost);
}
}
}

View File

@ -326,15 +326,15 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
CompletionFieldType other = (CompletionFieldType)fieldType;
if (preservePositionIncrements != other.preservePositionIncrements) {
conflicts.add("mapper [" + names().fullName() + "] has different [preserve_position_increments] values");
conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values");
}
if (preserveSep != other.preserveSep) {
conflicts.add("mapper [" + names().fullName() + "] has different [preserve_separators] values");
conflicts.add("mapper [" + name() + "] has different [preserve_separators] values");
}
if (hasContextMappings() != other.hasContextMappings()) {
conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values");
conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
} else if (hasContextMappings() && contextMappings.equals(other.contextMappings) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values");
conflicts.add("mapper [" + name() + "] has different [context_mappings] values");
}
}
@ -446,7 +446,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
Token token = parser.currentToken();
Map<String, CompletionInputMetaData> inputMap = new HashMap<>(1);
if (token == Token.VALUE_NULL) {
throw new MapperParsingException("completion field [" + fieldType().names().fullName() + "] does not support null values");
throw new MapperParsingException("completion field [" + fieldType().name() + "] does not support null values");
} else if (token == Token.START_ARRAY) {
while ((token = parser.nextToken()) != Token.END_ARRAY) {
parse(context, token, parser, inputMap);
@ -469,10 +469,10 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
}
CompletionInputMetaData metaData = completionInput.getValue();
if (fieldType().hasContextMappings()) {
fieldType().getContextMappings().addField(context.doc(), fieldType().names().indexName(),
fieldType().getContextMappings().addField(context.doc(), fieldType().name(),
input, metaData.weight, metaData.contexts);
} else {
context.doc().add(new SuggestField(fieldType().names().indexName(), input, metaData.weight));
context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight));
}
}
multiFields.parse(this, context);
@ -536,7 +536,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
weight = weightValue.intValue();
} else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) {
if (fieldType().hasContextMappings() == false) {
throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().names().fullName() + "]");
throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().name() + "]");
}
ContextMappings contextMappings = fieldType().getContextMappings();
XContentParser.Token currentToken = parser.currentToken();

View File

@ -249,7 +249,7 @@ public class DateFieldMapper extends NumberFieldMapper {
@Override
public String toString(String s) {
final StringBuilder sb = new StringBuilder();
return sb.append(names().indexName()).append(':')
return sb.append(name()).append(':')
.append(includeLower ? '[' : '{')
.append((lowerTerm == null) ? "*" : lowerTerm.toString())
.append(" TO ")
@ -306,13 +306,13 @@ public class DateFieldMapper extends NumberFieldMapper {
if (strict) {
DateFieldType other = (DateFieldType)fieldType;
if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
}
if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
}
if (Objects.equals(timeUnit(), other.timeUnit()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types.");
}
}
}
@ -404,7 +404,7 @@ public class DateFieldMapper extends NumberFieldMapper {
// not a time format
iSim = fuzziness.asLong();
}
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -424,7 +424,7 @@ public class DateFieldMapper extends NumberFieldMapper {
}
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser),
includeLower, includeUpper);
@ -516,7 +516,7 @@ public class DateFieldMapper extends NumberFieldMapper {
Long value = null;
if (dateAsString != null) {
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), dateAsString, boost);
context.allEntries().addText(fieldType().name(), dateAsString, boost);
}
value = fieldType().parseStringValue(dateAsString);
}

View File

@ -164,7 +164,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseDoubleValue(lowerTerm),
upperTerm == null ? null : parseDoubleValue(upperTerm),
includeLower, includeUpper);
@ -174,7 +174,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
double iValue = parseDoubleValue(value);
double iSim = fuzziness.asDouble();
return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -230,7 +230,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).doubleValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), Double.toString(value), boost);
context.allEntries().addText(fieldType().name(), Double.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -241,7 +241,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
}
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -270,7 +270,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
} else {
value = parser.doubleValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
context.allEntries().addText(fieldType().name(), parser.text(), boost);
}
}
}

View File

@ -165,7 +165,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
@ -175,7 +175,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
float iValue = parseValue(value);
final float iSim = fuzziness.asFloat();
return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newFloatRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -242,7 +242,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).floatValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), Float.toString(value), boost);
context.allEntries().addText(fieldType().name(), Float.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -253,7 +253,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
}
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -282,7 +282,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
} else {
value = parser.floatValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
context.allEntries().addText(fieldType().name(), parser.text(), boost);
}
}
}

View File

@ -170,7 +170,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
@ -180,7 +180,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
int iValue = parseValue(value);
int iSim = fuzziness.asInt();
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -247,7 +247,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).intValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), Integer.toString(value), boost);
context.allEntries().addText(fieldType().name(), Integer.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -258,7 +258,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
}
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -287,7 +287,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
} else {
value = parser.intValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
context.allEntries().addText(fieldType().name(), parser.text(), boost);
}
}
}

View File

@ -168,7 +168,7 @@ public class LongFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseLongValue(lowerTerm),
upperTerm == null ? null : parseLongValue(upperTerm),
includeLower, includeUpper);
@ -178,7 +178,7 @@ public class LongFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
long iValue = parseLongValue(value);
final long iSim = fuzziness.asLong();
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -235,7 +235,7 @@ public class LongFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).longValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), Long.toString(value), boost);
context.allEntries().addText(fieldType().name(), Long.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -246,7 +246,7 @@ public class LongFieldMapper extends NumberFieldMapper {
}
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -275,7 +275,7 @@ public class LongFieldMapper extends NumberFieldMapper {
} else {
value = parser.longValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
context.allEntries().addText(fieldType().name(), parser.text(), boost);
}
}
}

View File

@ -144,7 +144,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
List<String> conflicts, boolean strict) {
super.checkCompatibility(other, conflicts, strict);
if (numericPrecisionStep() != other.numericPrecisionStep()) {
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step] values");
conflicts.add("mapper [" + name() + "] has different [precision_step] values");
}
}
@ -243,7 +243,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
protected abstract void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException;
protected final void addDocValue(ParseContext context, List<Field> fields, long value) {
fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value));
fields.add(new SortedNumericDocValuesField(fieldType().name(), value));
}
/**
@ -329,7 +329,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM
};
public CustomNumericField(Number value, MappedFieldType fieldType) {
super(fieldType.names().indexName(), fieldType);
super(fieldType.name(), fieldType);
if (value != null) {
this.fieldsData = value;
}

View File

@ -166,7 +166,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : (int)parseValue(lowerTerm),
upperTerm == null ? null : (int)parseValue(upperTerm),
includeLower, includeUpper);
@ -176,7 +176,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
short iValue = parseValue(value);
short iSim = fuzziness.asShort();
return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newIntRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -243,7 +243,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
value = ((Number) externalValue).shortValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), Short.toString(value), boost);
context.allEntries().addText(fieldType().name(), Short.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
@ -254,7 +254,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
}
value = fieldType().nullValue();
if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
XContentParser.Token token;
@ -283,7 +283,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
} else {
value = parser.shortValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
context.allEntries().addText(fieldType().name(), parser.text(), boost);
}
}
}

View File

@ -248,7 +248,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values");
throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values");
}
this.positionIncrementGap = positionIncrementGap;
this.ignoreAbove = ignoreAbove;
@ -315,19 +315,19 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), valueAndBoost.value(), valueAndBoost.boost());
context.allEntries().addText(fieldType().name(), valueAndBoost.value(), valueAndBoost.boost());
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), valueAndBoost.value(), fieldType());
Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType());
field.setBoost(valueAndBoost.boost());
fields.add(field);
}
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(valueAndBoost.value())));
fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value())));
}
if (fields.isEmpty()) {
context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value());
context.ignoredValue(fieldType().name(), valueAndBoost.value());
}
}

View File

@ -148,7 +148,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper {
addIntegerFields(context, fields, count, valueAndBoost.boost());
}
if (fields.isEmpty()) {
context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value());
context.ignoredValue(fieldType().name(), valueAndBoost.value());
}
}

View File

@ -241,10 +241,7 @@ public class TypeParsers {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());
final Object propNode = entry.getValue();
if (propName.equals("index_name") && indexVersionCreated.before(Version.V_2_0_0_beta1)) {
builder.indexName(propNode.toString());
iterator.remove();
} else if (propName.equals("store")) {
if (propName.equals("store")) {
builder.store(parseStore(name, propNode.toString()));
iterator.remove();
} else if (propName.equals("index")) {

View File

@ -287,20 +287,20 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
super.checkCompatibility(fieldType, conflicts, strict);
GeoPointFieldType other = (GeoPointFieldType)fieldType;
if (isLatLonEnabled() != other.isLatLonEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]");
conflicts.add("mapper [" + name() + "] has different [lat_lon]");
}
if (isLatLonEnabled() && other.isLatLonEnabled() &&
latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
conflicts.add("mapper [" + name() + "] has different [precision_step]");
}
if (isGeoHashEnabled() != other.isGeoHashEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
conflicts.add("mapper [" + name() + "] has different [geohash]");
}
if (geoHashPrecision() != other.geoHashPrecision()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
conflicts.add("mapper [" + name() + "] has different [geohash_precision]");
}
if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
conflicts.add("mapper [" + name() + "] has different [geohash_prefix]");
}
}
@ -346,11 +346,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
}
}
protected final DoubleFieldMapper latMapper;
protected DoubleFieldMapper latMapper;
protected final DoubleFieldMapper lonMapper;
protected DoubleFieldMapper lonMapper;
protected final StringFieldMapper geoHashMapper;
protected StringFieldMapper geoHashMapper;
protected Explicit<Boolean> ignoreMalformed;
@ -504,4 +504,25 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
}
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType);
StringFieldMapper geoUpdated = geoHashMapper == null ? null : (StringFieldMapper) geoHashMapper.updateFieldType(fullNameToFieldType);
DoubleFieldMapper latUpdated = latMapper == null ? null : (DoubleFieldMapper) latMapper.updateFieldType(fullNameToFieldType);
DoubleFieldMapper lonUpdated = lonMapper == null ? null : (DoubleFieldMapper) lonMapper.updateFieldType(fullNameToFieldType);
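// only clone when this mapper's own field type or one of the sub-mappers actually
// changed; returning this keeps the common no-op update allocation-free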
if (updated == this
&& geoUpdated == geoHashMapper
&& latUpdated == latMapper
&& lonUpdated == lonMapper) {
return this;
}
if (updated == this) {
updated = (BaseGeoPointFieldMapper) updated.clone();
}
updated.geoHashMapper = geoUpdated;
updated.latMapper = latUpdated;
updated.lonMapper = lonUpdated;
return updated;
}
}

View File

@ -123,7 +123,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
GeoUtils.normalizePoint(point);
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
context.doc().add(new GeoPointField(fieldType().names().indexName(), point.lon(), point.lat(), fieldType() ));
context.doc().add(new GeoPointField(fieldType().name(), point.lon(), point.lat(), fieldType() ));
}
super.parse(context, point, geoHash);
}

View File

@ -301,7 +301,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith;
if (gpfmMergeWith.coerce.explicit()) {
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] has different [coerce]");
}
}
@ -330,17 +330,17 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
context.doc().add(field);
}
super.parse(context, point, geoHash);
if (fieldType().hasDocValues()) {
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().name());
if (field == null) {
field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
context.doc().addWithKey(fieldType().names().indexName(), field);
field = new CustomGeoPointDocValuesField(fieldType().name(), point.lat(), point.lon());
context.doc().addWithKey(fieldType().name(), field);
} else {
field.add(point.lat(), point.lon());
}

View File

@ -105,7 +105,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
static {
// setting the name here is a hack so freeze can be called... instead, all these options should be
// moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers...
FIELD_TYPE.setNames(new MappedFieldType.Names("DoesNotExist"));
FIELD_TYPE.setName("DoesNotExist");
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
@ -278,10 +278,10 @@ public class GeoShapeFieldMapper extends FieldMapper {
throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
}
recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, names().indexName());
recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, name());
recursiveStrategy.setDistErrPct(distanceErrorPct());
recursiveStrategy.setPruneLeafyBranches(false);
termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, names().indexName());
termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, name());
termStrategy.setDistErrPct(distanceErrorPct());
defaultStrategy = resolveStrategy(strategyName);
defaultStrategy.setPointsOnly(pointsOnly);
@ -293,33 +293,33 @@ public class GeoShapeFieldMapper extends FieldMapper {
GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
// prevent user from changing strategies
if (strategyName().equals(other.strategyName()) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different [strategy]");
conflicts.add("mapper [" + name() + "] has different [strategy]");
}
// prevent user from changing trees (changes encoding)
if (tree().equals(other.tree()) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different [tree]");
conflicts.add("mapper [" + name() + "] has different [tree]");
}
if ((pointsOnly() != other.pointsOnly())) {
conflicts.add("mapper [" + names().fullName() + "] has different points_only");
conflicts.add("mapper [" + name() + "] has different points_only");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (treeLevels() != other.treeLevels()) {
conflicts.add("mapper [" + names().fullName() + "] has different [tree_levels]");
conflicts.add("mapper [" + name() + "] has different [tree_levels]");
}
if (precisionInMeters() != other.precisionInMeters()) {
conflicts.add("mapper [" + names().fullName() + "] has different [precision]");
conflicts.add("mapper [" + name() + "] has different [precision]");
}
if (strict) {
if (orientation() != other.orientation()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [orientation] across all types.");
}
if (distanceErrorPct() != other.distanceErrorPct()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [distance_error_pct] across all types.");
}
}
}
@ -450,7 +450,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
shape = shapeBuilder.build();
}
if (fieldType().pointsOnly() && !(shape instanceof Point)) {
throw new MapperParsingException("[{" + fieldType().names().fullName() + "}] is configured for points only but a " +
throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " +
((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + " was found");
}
Field[] fields = fieldType().defaultStrategy().createIndexableFields(shape);
@ -464,7 +464,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e);
}
return null;
}

View File

@ -91,7 +91,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
@ -246,7 +246,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
// reset the entries
context.allEntries().reset();
Analyzer analyzer = findAnalyzer(context);
fields.add(new AllField(fieldType().names().indexName(), context.allEntries(), analyzer, fieldType()));
fields.add(new AllField(fieldType().name(), context.allEntries(), analyzer, fieldType()));
}
private Analyzer findAnalyzer(ParseContext context) {
@ -323,7 +323,7 @@ public class AllFieldMapper extends MetadataFieldMapper {
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
throw new IllegalArgumentException("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
}
super.doMerge(mergeWith, updateAllTypes);
}

View File

@ -69,7 +69,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
@ -175,7 +175,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
if (strict) {
FieldNamesFieldType other = (FieldNamesFieldType)fieldType;
if (isEnabled() != other.isEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types.");
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types.");
}
}
}
@ -216,7 +216,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
FieldNamesFieldType newFieldType = fieldType().clone();
newFieldType.setEnabled(false);
newFieldType.freeze();
fieldTypeRef.set(newFieldType);
this.fieldType = newFieldType;
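// with MappedFieldTypeReference removed (see the deleted class above), the mapper
// swaps in the frozen field type directly rather than through a shared mutable reference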
}
}
@ -290,7 +290,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
for (String path : paths) {
for (String fieldName : extractFieldNames(path)) {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
document.add(new Field(fieldType().names().indexName(), fieldName, fieldType()));
document.add(new Field(fieldType().name(), fieldName, fieldType()));
}
}
}

View File

@ -77,7 +77,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@ -285,10 +285,10 @@ public class IdFieldMapper extends MetadataFieldMapper {
} // else we are in the pre/post parse phase
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
fields.add(new Field(fieldType().names().indexName(), context.id(), fieldType()));
fields.add(new Field(fieldType().name(), context.id(), fieldType()));
}
if (fieldType().hasDocValues()) {
fields.add(new BinaryDocValuesField(fieldType().names().indexName(), new BytesRef(context.id())));
fields.add(new BinaryDocValuesField(fieldType().name(), new BytesRef(context.id())));
}
}

View File

@ -67,7 +67,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@ -223,7 +223,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
}
public String value(Document document) {
Field field = (Field) document.getField(fieldType().names().indexName());
Field field = (Field) document.getField(fieldType().name());
return field == null ? null : (String)fieldType().value(field);
}
@ -247,7 +247,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
if (!enabledState.enabled) {
return;
}
fields.add(new Field(fieldType().names().indexName(), context.index(), fieldType()));
fields.add(new Field(fieldType().name(), context.index(), fieldType()));
}
@Override

View File

@ -75,7 +75,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
JOIN_FIELD_TYPE.setHasDocValues(true);
@ -120,9 +120,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {
if (parentType == null) {
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
}
parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(documentType)));
parentJoinFieldType.setName(joinField(documentType));
parentJoinFieldType.setFieldDataType(null);
childJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
childJoinFieldType.setName(joinField(parentType));
return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings());
}
}
@ -242,7 +242,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
}
}
return new TermsQuery(names().indexName(), bValues);
return new TermsQuery(name(), bValues);
}
}
@ -269,7 +269,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) {
MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
parentJoinFieldType.setNames(new MappedFieldType.Names(joinField(parentType)));
parentJoinFieldType.setName(joinField(parentType));
parentJoinFieldType.freeze();
return parentJoinFieldType;
}
@ -312,7 +312,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
} else {
// otherwise, we are running it post processing of the xcontent
@ -324,7 +324,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) {
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
@ -336,7 +336,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
private void addJoinFieldIfNeeded(List<Field> fields, MappedFieldType fieldType, String id) {
if (fieldType.hasDocValues()) {
fields.add(new SortedDocValuesField(fieldType.names().indexName(), new BytesRef(id)));
fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(id)));
}
}

View File

@ -62,7 +62,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@ -179,7 +179,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
}
public String value(Document document) {
Field field = (Field) document.getField(fieldType().names().indexName());
Field field = (Field) document.getField(fieldType().name());
return field == null ? null : (String)fieldType().value(field);
}
@ -206,10 +206,10 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
String routing = context.sourceToParse().routing();
if (routing != null) {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
context.ignoredValue(fieldType().names().indexName(), routing);
context.ignoredValue(fieldType().name(), routing);
return;
}
fields.add(new Field(fieldType().names().indexName(), routing, fieldType()));
fields.add(new Field(fieldType().name(), routing, fieldType()));
}
}
}
@ -74,7 +74,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
@ -272,7 +272,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
if (!source.hasArray()) {
source = source.toBytesArray();
}
fields.add(new StoredField(fieldType().names().indexName(), source.array(), source.arrayOffset(), source.length()));
fields.add(new StoredField(fieldType().name(), source.array(), source.arrayOffset(), source.length()));
}
@Override
@ -64,7 +64,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
TTL_FIELD_TYPE.setIndexAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_64_BIT));
TTL_FIELD_TYPE.setSearchAnalyzer(NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE));
TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
TTL_FIELD_TYPE.setName(NAME);
TTL_FIELD_TYPE.freeze();
}
@ -67,7 +67,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setStored(true);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER);
FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT));
FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE));
@ -95,8 +95,8 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
private boolean explicitStore = false;
private Boolean ignoreMissing = null;
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing, Settings settings) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, chooseFieldType(settings, null));
if (existing != null) {
// if there is an existing type, always use that store value (only matters for < 2.0)
explicitStore = true;
@ -167,7 +167,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
public static class TypeParser implements MetadataFieldMapper.TypeParser {
@Override
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.mapperService().getIndexSettings().getSettings());
if (parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
parseField(builder, builder.name, node, parserContext);
}
@ -260,7 +260,7 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
private final Boolean ignoreMissing;
private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
this(chooseFieldType(indexSettings, existing).clone(), chooseFieldType(indexSettings, null).clone(), Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, null, indexSettings);
}
private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, String path,
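Two related TimestampFieldMapper fixes sit in the hunks above: the Builder and TypeParser now thread the index Settings through so the default field type is chosen per index via chooseFieldType(settings, null), and the default handed to the private constructor gains a defensive clone() so a mapper never shares, mutates, or freezes the global default instance. A sketch of what a chooseFieldType of this shape plausibly does; only Defaults.FIELD_TYPE and the version check are grounded in context, and the pre-2.0 constant name is an assumption:

    // Sketch, not the method body from this commit: prefer an existing
    // mapping's field type, otherwise pick a version-appropriate default.
    static MappedFieldType chooseFieldType(Settings settings, MappedFieldType existing) {
        if (existing != null) {
            return existing; // an existing mapping always wins
        }
        return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0_beta1)
                ? Defaults.FIELD_TYPE
                : Defaults.PRE_20_FIELD_TYPE; // hypothetical pre-2.0 default
    }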
@ -313,13 +313,13 @@ public class TimestampFieldMapper extends MetadataFieldMapper {
if (enabledState.enabled) {
long timestamp = context.sourceToParse().timestamp();
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) {
context.ignoredValue(fieldType().names().indexName(), String.valueOf(timestamp));
context.ignoredValue(fieldType().name(), String.valueOf(timestamp));
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
fields.add(new LongFieldMapper.CustomLongNumericField(timestamp, fieldType()));
}
if (fieldType().hasDocValues()) {
fields.add(new NumericDocValuesField(fieldType().names().indexName(), timestamp));
fields.add(new NumericDocValuesField(fieldType().name(), timestamp));
}
}
}
@ -70,7 +70,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
}
}
@ -84,7 +84,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {
@Override
public TypeFieldMapper build(BuilderContext context) {
fieldType.setNames(buildNames(context));
fieldType.setName(buildFullName(context));
return new TypeFieldMapper(fieldType, context.indexSettings());
}
}
@ -186,9 +186,9 @@ public class TypeFieldMapper extends MetadataFieldMapper {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
return;
}
fields.add(new Field(fieldType().names().indexName(), context.type(), fieldType()));
fields.add(new Field(fieldType().name(), context.type(), fieldType()));
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(context.type())));
fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.type())));
}
}
@ -66,7 +66,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.freeze();
NESTED_FIELD_TYPE = FIELD_TYPE.clone();
@ -193,7 +193,7 @@ public class UidFieldMapper extends MetadataFieldMapper {
}
public Term term(String uid) {
return new Term(fieldType().names().indexName(), fieldType().indexedValueForSearch(uid));
return new Term(fieldType().name(), fieldType().indexedValueForSearch(uid));
}
@Override
@ -51,7 +51,7 @@ public class VersionFieldMapper extends MetadataFieldMapper {
public static final MappedFieldType FIELD_TYPE = new VersionFieldType();
static {
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setName(NAME);
FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC);
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.freeze();
@ -229,7 +229,7 @@ public class IpFieldMapper extends NumberFieldMapper {
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
lowerTerm == null ? null : parseValue(lowerTerm),
upperTerm == null ? null : parseValue(upperTerm),
includeLower, includeUpper);
@ -244,7 +244,7 @@ public class IpFieldMapper extends NumberFieldMapper {
} catch (IllegalArgumentException e) {
iSim = fuzziness.asLong();
}
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
return NumericRangeQuery.newLongRange(name(), numericPrecisionStep(),
iValue - iSim,
iValue + iSim,
true, true);
@ -287,7 +287,7 @@ public class IpFieldMapper extends NumberFieldMapper {
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType().names().fullName(), ipAsString, fieldType().boost());
context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost());
}
final long value = ipToLong(ipAsString);
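The ip hunks above keep working with plain NumericRangeQuery.newLongRange calls because IPv4 addresses are indexed as longs. A simplified, standalone version of that encoding; the real ipToLong in IpFieldMapper also validates its input, while this sketch assumes a well-formed dotted quad:

    // Packs "a.b.c.d" into a long, most significant octet first, so that
    // numeric ordering of the longs matches ordering of the addresses.
    static long ipToLong(String ip) {
        long value = 0;
        for (String octet : ip.split("\\.")) {
            value = (value << 8) | Integer.parseInt(octet);
        }
        return value;
    }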
@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
@ -493,6 +494,28 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
}
}
@Override
public ObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
List<Mapper> updatedMappers = null;
for (Mapper mapper : this) {
Mapper updated = mapper.updateFieldType(fullNameToFieldType);
if (mapper != updated) {
if (updatedMappers == null) {
updatedMappers = new ArrayList<>();
}
updatedMappers.add(updated);
}
}
if (updatedMappers == null) {
return this;
}
ObjectMapper updated = clone();
for (Mapper updatedMapper : updatedMappers) {
updated.putMapper(updatedMapper);
}
return updated;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
toXContent(builder, params, null);
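The new ObjectMapper#updateFieldType above is deliberately copy-on-write: children are updated recursively, no list is allocated while nothing has changed, and the mapper clones itself only when at least one child differs, so untouched subtrees stay shared between old and new mappings. The same idiom reduced to a self-contained toy tree (Node stands in for Mapper; none of this is Elasticsearch code):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    final class Node {
        final String name;
        final List<Node> children;

        Node(String name, List<Node> children) {
            this.name = name;
            this.children = children;
        }

        // Returns `this` when no descendant changes; otherwise returns a copy
        // in which only the changed children are replaced.
        Node rename(Map<String, String> renames) {
            List<Node> updated = null;
            for (int i = 0; i < children.size(); i++) {
                Node child = children.get(i).rename(renames);
                if (child != children.get(i)) {
                    if (updated == null) {
                        updated = new ArrayList<>(children); // copy on first change
                    }
                    updated.set(i, child);
                }
            }
            String newName = renames.getOrDefault(name, name);
            if (updated == null && newName.equals(name)) {
                return this; // fully unchanged subtree: share it
            }
            return new Node(newName, updated == null ? children : updated);
        }
    }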
@ -27,6 +27,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
@ -295,6 +296,11 @@ public class RootObjectMapper extends ObjectMapper {
this.dynamicTemplates = mergedTemplates.toArray(new DynamicTemplate[mergedTemplates.size()]);
}
@Override
public RootObjectMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
return (RootObjectMapper) super.updateFieldType(fullNameToFieldType);
}
@Override
protected void doXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
if (dynamicDateTimeFormatters != Defaults.DYNAMIC_DATE_TIME_FORMATTERS) {
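The RootObjectMapper override above adds no behaviour of its own; it exists to narrow the return type so callers holding a RootObjectMapper get one back without a cast. The covariant-return idiom in isolation:

    class Base {
        Base copy() { return new Base(); }
    }

    class Derived extends Base {
        @Override
        Derived copy() { return new Derived(); } // covariant return still overrides
    }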
@ -54,7 +54,7 @@ final class QueriesLoaderCollector extends SimpleCollector {
QueriesLoaderCollector(PercolatorQueriesRegistry percolator, ESLogger logger, MapperService mapperService, IndexFieldDataService indexFieldDataService) {
this.percolator = percolator;
this.logger = logger;
final MappedFieldType uidMapper = mapperService.smartNameFieldType(UidFieldMapper.NAME);
final MappedFieldType uidMapper = mapperService.fullName(UidFieldMapper.NAME);
this.uidFieldData = indexFieldDataService.getForField(uidMapper);
}
@ -235,7 +235,7 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
String field;
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
field = fieldType.names().indexName();
field = fieldType.name();
} else {
field = fieldName;
}
@ -101,7 +101,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (fieldType != null) {
f = fieldType.names().indexName();
f = fieldType.name();
} else {
f = field;
}
@ -87,7 +87,7 @@ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder<FieldMask
String fieldInQuery = fieldName;
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
fieldInQuery = fieldType.names().indexName();
fieldInQuery = fieldType.name();
}
Query innerQuery = queryBuilder.toQuery(context);
assert innerQuery instanceof SpanQuery;
@ -265,7 +265,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
}
if (context.indexVersionCreated().onOrAfter(Version.V_2_2_0)) {
return new GeoPointInBBoxQuery(fieldType.names().fullName(), luceneTopLeft.lon(), luceneBottomRight.lat(),
return new GeoPointInBBoxQuery(fieldType.name(), luceneTopLeft.lon(), luceneBottomRight.lat(),
luceneBottomRight.lon(), luceneTopLeft.lat());
}
@ -236,7 +236,7 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
}
normDistance = GeoUtils.maxRadialDistance(center, normDistance);
return new GeoPointDistanceQuery(fieldType.names().fullName(), center.lon(), center.lat(), normDistance);
return new GeoPointDistanceQuery(fieldType.name(), center.lon(), center.lat(), normDistance);
}
@Override
@ -273,7 +273,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
indexFieldData, optimizeBbox);
}
return new GeoPointDistanceRangeQuery(fieldType.names().fullName(), point.lon(), point.lat(),
return new GeoPointDistanceRangeQuery(fieldType.name(), point.lon(), point.lat(),
(includeLower) ? fromValue : fromValue + TOLERANCE,
(includeUpper) ? toValue : toValue - TOLERANCE);
}
@ -149,7 +149,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
lats[i] = p.lat();
lons[i] = p.lon();
}
return new GeoPointInPolygonQuery(fieldType.names().fullName(), lons, lats);
return new GeoPointInPolygonQuery(fieldType.name(), lons, lats);
}
@Override
@ -810,7 +810,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} else {
for (String field : fields) {
MappedFieldType fieldType = context.fieldMapper(field);
moreLikeFields.add(fieldType == null ? field : fieldType.names().indexName());
moreLikeFields.add(fieldType == null ? field : fieldType.name());
}
}
@ -251,11 +251,11 @@ public class QueryShardContext {
}
public MappedFieldType fieldMapper(String name) {
return failIfFieldMappingNotFound(name, mapperService.smartNameFieldType(name, getTypes()));
return failIfFieldMappingNotFound(name, mapperService.fullName(name));
}
public ObjectMapper getObjectMapper(String name) {
return mapperService.getObjectMapper(name, getTypes());
return mapperService.getObjectMapper(name);
}
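With QueryShardContext delegating to MapperService#fullName, field lookups are keyed by the field's full path across the whole index; the type-filtered smartNameFieldType variant is gone, and getObjectMapper likewise drops its types argument. The query builders throughout this diff consume the lookup through one fallback pattern, sketched here (the helper class is illustrative):

    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.query.QueryShardContext;

    final class FieldResolution {
        // Resolve the indexed field name, falling back to the requested name
        // when the field is unmapped, as the builders in this commit do.
        static String indexFieldName(QueryShardContext context, String requested) {
            MappedFieldType fieldType = context.fieldMapper(requested);
            return fieldType == null ? requested : fieldType.name();
        }
    }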
/**
@ -294,7 +294,7 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
private static String resolveIndexName(String fieldName, QueryShardContext context) {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
return fieldType.names().indexName();
return fieldType.name();
}
return fieldName;
}
@ -73,7 +73,7 @@ public class SpanTermQueryBuilder extends BaseTermQueryBuilder<SpanTermQueryBuil
String fieldName = this.fieldName;
MappedFieldType mapper = context.fieldMapper(fieldName);
if (mapper != null) {
fieldName = mapper.names().indexName();
fieldName = mapper.name();
valueBytes = mapper.indexedValueForSearch(value);
}
if (valueBytes == null) {
@ -262,7 +262,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
MappedFieldType fieldType = context.fieldMapper(fieldName);
String indexFieldName;
if (fieldType != null) {
indexFieldName = fieldType.names().indexName();
indexFieldName = fieldType.name();
} else {
indexFieldName = fieldName;
}
@ -118,7 +118,7 @@ public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuil
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType != null) {
indexFieldName = fieldType.names().indexName();
indexFieldName = fieldType.name();
valueBytes = fieldType.indexedValueForSearch(value);
} else {
indexFieldName = fieldName;
@ -374,7 +374,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder> ext
@Override
protected String getFieldName() {
return fieldData.getFieldNames().fullName();
return fieldData.getFieldName();
}
@Override
@ -450,7 +450,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder> ext
@Override
protected String getFieldName() {
return fieldData.getFieldNames().fullName();
return fieldData.getFieldName();
}
@Override
@ -148,7 +148,7 @@ public class FieldValueFactorFunctionBuilder extends ScoreFunctionBuilder<FieldV
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
MappedFieldType fieldType = context.getMapperService().smartNameFieldType(field);
MappedFieldType fieldType = context.getMapperService().fullName(field);
IndexNumericFieldData fieldData = null;
if (fieldType == null) {
if(missing == null) {
@ -117,7 +117,7 @@ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder<RandomScore
@Override
protected ScoreFunction doToFunction(QueryShardContext context) {
final MappedFieldType fieldType = context.getMapperService().smartNameFieldType("_uid");
final MappedFieldType fieldType = context.getMapperService().fullName("_uid");
if (fieldType == null) {
// mapper could be null if we are on a shard with no docs yet, so this won't actually be used
return new RandomScoreFunction();
Some files were not shown because too many files have changed in this diff.