Remove the _all metadata field (#26356)
* Remove the _all metadata field

This change removes the `_all` metadata field. The field is deprecated in 6.x and cannot be enabled for indices created in 6.x, so it can safely be removed in the next major version (7.0).
parent f95dec797d
commit 86d97971a4
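The deprecation message introduced by this change points users at `copy_to` as the replacement for `_all`. A minimal sketch of such a mapping, built with `XContentBuilder`; the field names (`first_name`, `last_name`, `full_text`) are hypothetical:

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class CopyToMappingExample {
    // Sketch: two (hypothetical) text fields are copied into a user-defined
    // catch-all field, replacing what the removed _all field used to provide.
    public static XContentBuilder catchAllMapping() throws IOException {
        return XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("first_name")
                        .field("type", "text")
                        .field("copy_to", "full_text")
                    .endObject()
                    .startObject("last_name")
                        .field("type", "text")
                        .field("copy_to", "full_text")
                    .endObject()
                    .startObject("full_text")
                        .field("type", "text")
                    .endObject()
                .endObject()
            .endObject();
    }
}

Unlike `_all`, the catch-all field here is an ordinary mapped field, so it can be queried, analyzed, and disabled like any other field.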
CustomUnifiedHighlighter.java

@@ -35,7 +35,6 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.index.search.ESToParentBlockJoinQuery;

@@ -206,9 +205,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter {
                 tqs.add(new TermQuery(term));
             }
             return tqs;
-        } else if (query instanceof AllTermQuery) {
-            AllTermQuery atq = (AllTermQuery) query;
-            return Collections.singletonList(new TermQuery(atq.getTerm()));
         } else if (query instanceof FunctionScoreQuery) {
            return Collections.singletonList(((FunctionScoreQuery) query).getSubQuery());
         } else if (query instanceof ESToParentBlockJoinQuery) {
TransportAnalyzeAction.java

@@ -55,7 +55,6 @@ import org.elasticsearch.index.analysis.MultiTermAwareComponent;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
 import org.elasticsearch.index.analysis.TokenizerFactory;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.shard.ShardId;

@@ -151,13 +150,11 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
         }
         if (field == null) {
             /**
-             * TODO: _all is disabled by default and index.query.default_field can define multiple fields or pattterns so we should
+             * TODO: _all is disabled by default and index.query.default_field can define multiple fields or patterns so we should
              * probably makes the field name mandatory in analyze query.
             **/
             if (indexService != null) {
                 field = indexService.getIndexSettings().getDefaultFields().get(0);
-            } else {
-                field = AllFieldMapper.NAME;
             }
         }
         final AnalysisRegistry analysisRegistry = indicesService.getAnalysis();
AllEntries.java (deleted)

@@ -1,64 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.all;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class AllEntries {
-    public static class Entry {
-        private final String name;
-        private final String value;
-        private final float boost;
-
-        public Entry(String name, String value, float boost) {
-            this.name = name;
-            this.value = value;
-            this.boost = boost;
-        }
-
-        public String name() {
-            return this.name;
-        }
-
-        public float boost() {
-            return this.boost;
-        }
-
-        public String value() {
-            return this.value;
-        }
-    }
-
-    private final List<Entry> entries = new ArrayList<>();
-
-    public void addText(String name, String text, float boost) {
-        Entry entry = new Entry(name, text, boost);
-        entries.add(entry);
-    }
-
-    public void clear() {
-        this.entries.clear();
-    }
-
-    public List<Entry> entries() {
-        return this.entries;
-    }
-}
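For the record, the deleted class above was a plain accumulator. A minimal usage sketch (only compiles against a pre-7.0 tree where `AllEntries` still exists; the field names and values are hypothetical):

import org.elasticsearch.common.lucene.all.AllEntries;

public class AllEntriesUsage {
    public static void main(String[] args) {
        // During document parsing, each indexed field contributed its text
        // plus a per-field boost; AllFieldMapper later turned every entry
        // into one AllField instance on the Lucene document.
        AllEntries entries = new AllEntries();
        entries.addText("title", "quick brown fox", 2.0f);
        entries.addText("body", "jumps over the lazy dog", 1.0f);
        for (AllEntries.Entry e : entries.entries()) {
            System.out.println(e.name() + " -> " + e.value() + " (boost=" + e.boost() + ")");
        }
    }
}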
AllField.java (deleted)

@@ -1,49 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.all;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.IndexOptions;
-
-public class AllField extends Field {
-    private final float boost;
-
-    public AllField(String name, String value, float boost, FieldType fieldType) {
-        super(name, value, fieldType);
-        this.boost = boost;
-    }
-
-    @Override
-    public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) {
-        TokenStream ts = analyzer.tokenStream(name(), stringValue());
-        if (boost != 1.0f && fieldType().indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) {
-            // TODO: we should be able to reuse "previous" if its instanceof AllTokenStream?
-            // but we need to be careful this optimization is safe (and tested)...
-
-            // AllTokenStream maps boost to 4-byte payloads, so we only need to use it any field had non-default (!= 1.0f) boost and if
-            // positions are indexed:
-            return new AllTokenStream(ts, boost);
-        }
-        return ts;
-    }
-}
AllTermQuery.java (deleted)

@@ -1,237 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.all;
-
-import org.apache.lucene.analysis.payloads.PayloadHelper;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermContext;
-import org.apache.lucene.index.TermState;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.CollectionStatistics;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TermStatistics;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.similarities.Similarity.SimScorer;
-import org.apache.lucene.search.similarities.Similarity.SimWeight;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.SmallFloat;
-
-import java.io.IOException;
-import java.util.Objects;
-import java.util.Set;
-
-/**
- * A term query that takes all payload boost values into account.
- * <p>
- * It is like PayloadTermQuery with AveragePayloadFunction, except
- * unlike PayloadTermQuery, it doesn't plug into the similarity to
- * determine how the payload should be factored in, it just parses
- * the float and multiplies the average with the regular score.
- */
-public final class AllTermQuery extends Query {
-
-    private final Term term;
-
-    public AllTermQuery(Term term) {
-        this.term = term;
-    }
-
-    public Term getTerm() {
-        return term;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (sameClassAs(obj) == false) {
-            return false;
-        }
-        return Objects.equals(term, ((AllTermQuery) obj).term);
-    }
-
-    @Override
-    public int hashCode() {
-        return 31 * classHash() + term.hashCode();
-    }
-
-    @Override
-    public Query rewrite(IndexReader reader) throws IOException {
-        Query rewritten = super.rewrite(reader);
-        if (rewritten != this) {
-            return rewritten;
-        }
-        boolean hasPayloads = false;
-        for (LeafReaderContext context : reader.leaves()) {
-            final Terms terms = context.reader().terms(term.field());
-            if (terms != null) {
-                if (terms.hasPayloads()) {
-                    hasPayloads = true;
-                    break;
-                }
-            }
-        }
-        // if the terms does not exist we could return a MatchNoDocsQuery but this would break the unified highlighter
-        // which rewrites query with an empty reader.
-        if (hasPayloads == false) {
-            return new TermQuery(term);
-        }
-        return this;
-    }
-
-    @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-        if (needsScores == false) {
-            return new TermQuery(term).createWeight(searcher, needsScores, boost);
-        }
-        final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term);
-        final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
-        final TermStatistics termStats = searcher.termStatistics(term, termStates);
-        final Similarity similarity = searcher.getSimilarity(needsScores);
-        final SimWeight stats = similarity.computeWeight(boost, collectionStats, termStats);
-        return new Weight(this) {
-
-            @Override
-            public void extractTerms(Set<Term> terms) {
-                terms.add(term);
-            }
-
-            @Override
-            public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-                AllTermScorer scorer = scorer(context);
-                if (scorer != null) {
-                    int newDoc = scorer.iterator().advance(doc);
-                    if (newDoc == doc) {
-                        float score = scorer.score();
-                        float freq = scorer.freq();
-                        SimScorer docScorer = similarity.simScorer(stats, context);
-                        Explanation freqExplanation = Explanation.match(freq, "termFreq=" + freq);
-                        Explanation termScoreExplanation = docScorer.explain(doc, freqExplanation);
-                        Explanation payloadBoostExplanation =
-                            Explanation.match(scorer.payloadBoost(), "payloadBoost=" + scorer.payloadBoost());
-                        return Explanation.match(
-                                score,
-                                "weight(" + getQuery() + " in " + doc + ") ["
-                                    + similarity.getClass().getSimpleName() + "], product of:",
-                                termScoreExplanation, payloadBoostExplanation);
-                    }
-                }
-                return Explanation.noMatch("no matching term");
-            }
-
-            @Override
-            public AllTermScorer scorer(LeafReaderContext context) throws IOException {
-                final Terms terms = context.reader().terms(term.field());
-                if (terms == null) {
-                    return null;
-                }
-                final TermsEnum termsEnum = terms.iterator();
-                if (termsEnum == null) {
-                    return null;
-                }
-                final TermState state = termStates.get(context.ord);
-                if (state == null) {
-                    // Term does not exist in this segment
-                    return null;
-                }
-                termsEnum.seekExact(term.bytes(), state);
-                PostingsEnum docs = termsEnum.postings(null, PostingsEnum.PAYLOADS);
-                assert docs != null;
-                return new AllTermScorer(this, docs, similarity.simScorer(stats, context));
-            }
-
-        };
-    }
-
-    private static class AllTermScorer extends Scorer {
-
-        final PostingsEnum postings;
-        final Similarity.SimScorer docScorer;
-        int doc = -1;
-        float payloadBoost;
-
-        AllTermScorer(Weight weight, PostingsEnum postings, Similarity.SimScorer docScorer) {
-            super(weight);
-            this.postings = postings;
-            this.docScorer = docScorer;
-        }
-
-        float payloadBoost() throws IOException {
-            if (doc != docID()) {
-                final int freq = postings.freq();
-                payloadBoost = 0;
-                for (int i = 0; i < freq; ++i) {
-                    postings.nextPosition();
-                    final BytesRef payload = postings.getPayload();
-                    float boost;
-                    if (payload == null) {
-                        boost = 1;
-                    } else if (payload.length == 1) {
-                        boost = SmallFloat.byte315ToFloat(payload.bytes[payload.offset]);
-                    } else if (payload.length == 4) {
-                        // TODO: for bw compat only, remove this in 6.0
-                        boost = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
-                    } else {
-                        throw new IllegalStateException("Payloads are expected to have a length of 1 or 4 but got: "
-                                + payload);
-                    }
-                    payloadBoost += boost;
-                }
-                payloadBoost /= freq;
-                doc = docID();
-            }
-            return payloadBoost;
-        }
-
-        @Override
-        public float score() throws IOException {
-            return payloadBoost() * docScorer.score(postings.docID(), postings.freq());
-        }
-
-        @Override
-        public int freq() throws IOException {
-            return postings.freq();
-        }
-
-        @Override
-        public int docID() {
-            return postings.docID();
-        }
-
-        @Override
-        public DocIdSetIterator iterator() {
-            return postings;
-        }
-    }
-
-    @Override
-    public String toString(String field) {
-        return new TermQuery(term).toString(field);
-    }
-
-}
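The scorer above multiplies the regular similarity score by the average of one boost per term occurrence. A standalone sketch of just that averaging step (plain Java, no Lucene types; the input values are hypothetical):

public class AveragePayloadBoost {
    // Mirrors AllTermScorer.payloadBoost(): decode one boost per occurrence of
    // the term in the document, defaulting to 1 when a position carries no
    // payload, then average over the term frequency.
    static float averageBoost(Float[] payloadBoosts) {
        float sum = 0;
        for (Float b : payloadBoosts) {
            sum += (b == null) ? 1f : b;
        }
        return sum / payloadBoosts.length;
    }

    public static void main(String[] args) {
        // Three occurrences: boosts 2.0, none, 3.0 -> (2 + 1 + 3) / 3 = 2.0
        System.out.println(averageBoost(new Float[] {2.0f, null, 3.0f}));
    }
}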
AllTokenStream.java (deleted)

@@ -1,53 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.all;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.TokenFilter;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.SmallFloat;
-
-import java.io.IOException;
-
-public final class AllTokenStream extends TokenFilter {
-    public static TokenStream allTokenStream(String allFieldName, String value, float boost, Analyzer analyzer) throws IOException {
-        return new AllTokenStream(analyzer.tokenStream(allFieldName, value), boost);
-    }
-
-    private final BytesRef payloadSpare = new BytesRef(new byte[1]);
-    private final PayloadAttribute payloadAttribute;
-
-    AllTokenStream(TokenStream input, float boost) {
-        super(input);
-        payloadAttribute = addAttribute(PayloadAttribute.class);
-        payloadSpare.bytes[0] = SmallFloat.floatToByte315(boost);
-    }
-
-    @Override
-    public boolean incrementToken() throws IOException {
-        if (!input.incrementToken()) {
-            return false;
-        }
-        payloadAttribute.setPayload(payloadSpare);
-        return true;
-    }
-}
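The one-byte payload written above relies on Lucene's `SmallFloat`; a small self-contained sketch of the round trip between the token filter (encode) and the scorer (decode), needing only lucene-core on the classpath:

import org.apache.lucene.util.SmallFloat;

public class BoostPayloadRoundTrip {
    public static void main(String[] args) {
        float boost = 2.5f;
        // AllTokenStream stored the per-field boost as a single payload byte...
        byte encoded = SmallFloat.floatToByte315(boost);
        // ...and AllTermScorer decoded it again at score time.
        float decoded = SmallFloat.byte315ToFloat(encoded);
        // The 3-bit-mantissa / 5-bit-exponent format is lossy, so the decoded
        // value is only approximately equal to the original boost.
        System.out.println(boost + " -> byte " + encoded + " -> " + decoded);
    }
}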
IndexSettings.java

@@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.node.Node;
 

@@ -49,21 +48,9 @@ import java.util.function.Function;
 * be called for each settings update.
 */
 public final class IndexSettings {
-    public static final String DEFAULT_FIELD_SETTING_KEY = "index.query.default_field";
-    public static final Setting<List<String>> DEFAULT_FIELD_SETTING;
-    static {
-        Function<Settings, List<String>> defValue = settings -> {
-            final String defaultField;
-            if (settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null) != null &&
-                    Version.indexCreated(settings).before(Version.V_6_0_0_alpha1)) {
-                defaultField = AllFieldMapper.NAME;
-            } else {
-                defaultField = "*";
-            }
-            return Collections.singletonList(defaultField);
-        };
-        DEFAULT_FIELD_SETTING = Setting.listSetting(DEFAULT_FIELD_SETTING_KEY, defValue, Function.identity(), Property.IndexScope, Property.Dynamic);
-    }
+    public static final Setting<List<String>> DEFAULT_FIELD_SETTING =
+        Setting.listSetting("index.query.default_field", Collections.singletonList("*"),
+            Function.identity(), Property.IndexScope, Property.Dynamic);
     public static final Setting<Boolean> QUERY_STRING_LENIENT_SETTING =
         Setting.boolSetting("index.query_string.lenient", false, Property.IndexScope);
     public static final Setting<Boolean> QUERY_STRING_ANALYZE_WILDCARD =
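After this change the setting no longer inspects the index creation version: the default is always `["*"]` rather than `["_all"]` for pre-6.0 indices. A minimal sketch of what the simplified declaration resolves to (the empty `Settings` stands in for an index without an explicit value):

import java.util.List;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;

public class DefaultFieldExample {
    public static void main(String[] args) {
        // With no explicit index.query.default_field, the new declaration
        // always falls back to ["*"], regardless of the index version.
        List<String> defaultFields = IndexSettings.DEFAULT_FIELD_SETTING.get(Settings.EMPTY);
        System.out.println(defaultFields); // [*]
    }
}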
AllFieldMapper.java (deleted)

@@ -1,319 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Query;
-import org.elasticsearch.Version;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.lucene.all.AllEntries;
-import org.elasticsearch.common.lucene.all.AllField;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.similarity.SimilarityService;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
-import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;
-
-public class AllFieldMapper extends MetadataFieldMapper {
-
-    public static final String NAME = "_all";
-
-    public static final String CONTENT_TYPE = "_all";
-
-    public static class Defaults {
-        public static final String NAME = AllFieldMapper.NAME;
-        public static final String INDEX_NAME = AllFieldMapper.NAME;
-        public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_DISABLED;
-        public static final int POSITION_INCREMENT_GAP = 100;
-
-        public static final MappedFieldType FIELD_TYPE = new AllFieldType();
-
-        static {
-            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-            FIELD_TYPE.setTokenized(true);
-            FIELD_TYPE.setName(NAME);
-            FIELD_TYPE.freeze();
-        }
-    }
-
-    public static class Builder extends MetadataFieldMapper.Builder<Builder, AllFieldMapper> {
-
-        private EnabledAttributeMapper enabled = Defaults.ENABLED;
-
-        public Builder(MappedFieldType existing) {
-            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
-            builder = this;
-        }
-
-        public Builder enabled(EnabledAttributeMapper enabled) {
-            this.enabled = enabled;
-            return this;
-        }
-
-        @Override
-        public AllFieldMapper build(BuilderContext context) {
-            // In case the mapping overrides these
-            // TODO: this should be an exception! it doesnt make sense to not index this field
-            if (fieldType.indexOptions() == IndexOptions.NONE) {
-                fieldType.setIndexOptions(Defaults.FIELD_TYPE.indexOptions());
-            } else {
-                fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(),
-                    Defaults.POSITION_INCREMENT_GAP));
-                fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(),
-                    Defaults.POSITION_INCREMENT_GAP));
-                fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(),
-                    Defaults.POSITION_INCREMENT_GAP));
-            }
-            fieldType.setTokenized(true);
-
-            return new AllFieldMapper(fieldType, enabled, context.indexSettings());
-        }
-    }
-
-    public static class TypeParser implements MetadataFieldMapper.TypeParser {
-        @Override
-        public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node,
-                ParserContext parserContext) throws MapperParsingException {
-            if (node.isEmpty() == false &&
-                    parserContext.indexVersionCreated().onOrAfter(Version.V_6_0_0_alpha1)) {
-                throw new IllegalArgumentException("[_all] is disabled in 6.0. As a replacement, you can use an [copy_to] " +
-                        "on mapping fields to create your own catch all field.");
-            }
-            Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
-            builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
-            builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
-            builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
-
-            // parseField below will happily parse the doc_values setting, but it is then never passed to
-            // the AllFieldMapper ctor in the builder since it is not valid. Here we validate
-            // the doc values settings (old and new) are rejected
-            Object docValues = node.get("doc_values");
-            if (docValues != null && TypeParsers.nodeBooleanValueLenient(name, "doc_values", docValues)) {
-                throw new MapperParsingException("Field [" + name +
-                    "] is always tokenized and cannot have doc values");
-            }
-            // convoluted way of specifying doc values
-            Object fielddata = node.get("fielddata");
-            if (fielddata != null) {
-                Map<String, Object> fielddataMap = nodeMapValue(fielddata, "fielddata");
-                Object format = fielddataMap.get("format");
-                if ("doc_values".equals(format)) {
-                    throw new MapperParsingException("Field [" + name +
-                        "] is always tokenized and cannot have doc values");
-                }
-            }
-
-            parseTextField(builder, builder.name, node, parserContext);
-            boolean enabledSet = false;
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = entry.getKey();
-                Object fieldNode = entry.getValue();
-                if (fieldName.equals("enabled")) {
-                    boolean enabled = TypeParsers.nodeBooleanValueLenient(name, "enabled", fieldNode);
-                    builder.enabled(enabled ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
-                    enabledSet = true;
-                    iterator.remove();
-                }
-            }
-            if (enabledSet == false && parserContext.indexVersionCreated().before(Version.V_6_0_0_alpha1)) {
-                // So there is no "enabled" field, however, the index was created prior to 6.0,
-                // and therefore the default for this particular index should be "true" for
-                // enabling _all
-                builder.enabled(EnabledAttributeMapper.ENABLED);
-            }
-            return builder;
-        }
-
-        @Override
-        public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) {
-            final Settings indexSettings = context.mapperService().getIndexSettings().getSettings();
-            if (fieldType != null) {
-                if (context.indexVersionCreated().before(Version.V_6_0_0_alpha1)) {
-                    // The index was created prior to 6.0, and therefore the default for this
-                    // particular index should be "true" for enabling _all
-                    return new AllFieldMapper(fieldType.clone(), EnabledAttributeMapper.ENABLED, indexSettings);
-                } else {
-                    return new AllFieldMapper(indexSettings, fieldType);
-                }
-            } else {
-                return parse(NAME, Collections.emptyMap(), context)
-                        .build(new BuilderContext(indexSettings, new ContentPath(1)));
-            }
-        }
-    }
-
-    static final class AllFieldType extends StringFieldType {
-
-        AllFieldType() {
-        }
-
-        protected AllFieldType(AllFieldType ref) {
-            super(ref);
-        }
-
-        @Override
-        public MappedFieldType clone() {
-            return new AllFieldType(this);
-        }
-
-        @Override
-        public String typeName() {
-            return CONTENT_TYPE;
-        }
-
-        @Override
-        public Query queryStringTermQuery(Term term) {
-            return new AllTermQuery(term);
-        }
-
-        @Override
-        public Query termQuery(Object value, QueryShardContext context) {
-            return queryStringTermQuery(new Term(name(), indexedValueForSearch(value)));
-        }
-    }
-
-    private EnabledAttributeMapper enabledState;
-
-    private AllFieldMapper(Settings indexSettings, MappedFieldType existing) {
-        this(existing.clone(), Defaults.ENABLED, indexSettings);
-    }
-
-    private AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, Settings indexSettings) {
-        super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
-        this.enabledState = enabled;
-    }
-
-    public boolean enabled() {
-        return this.enabledState.enabled;
-    }
-
-    @Override
-    public void preParse(ParseContext context) throws IOException {
-    }
-
-    @Override
-    public void postParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    public Mapper parse(ParseContext context) throws IOException {
-        // we parse in post parse
-        return null;
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
-        if (!enabledState.enabled) {
-            return;
-        }
-        for (AllEntries.Entry entry : context.allEntries().entries()) {
-            fields.add(new AllField(fieldType().name(), entry.value(), entry.boost(), fieldType()));
-        }
-    }
-
-    @Override
-    protected String contentType() {
-        return CONTENT_TYPE;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
-        if (!includeDefaults) {
-            // simulate the generation to make sure we don't add unnecessary content if all is default
-            // if all are defaults, no need to write it at all - generating is twice is ok though
-            BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(0);
-            XContentBuilder b = new XContentBuilder(builder.contentType().xContent(), bytesStreamOutput);
-            b.startObject().flush();
-            long pos = bytesStreamOutput.position();
-            innerToXContent(b, false);
-            b.flush();
-            if (pos == bytesStreamOutput.position()) {
-                return builder;
-            }
-        }
-        builder.startObject(CONTENT_TYPE);
-        innerToXContent(builder, includeDefaults);
-        builder.endObject();
-        return builder;
-    }
-
-    private void innerToXContent(XContentBuilder builder, boolean includeDefaults) throws IOException {
-        if (includeDefaults || enabledState != Defaults.ENABLED) {
-            builder.field("enabled", enabledState.enabled);
-        }
-        if (enabled() == false) {
-            return;
-        }
-        if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
-            builder.field("store", fieldType().stored());
-        }
-        if (includeDefaults || fieldType().storeTermVectors() != Defaults.FIELD_TYPE.storeTermVectors()) {
-            builder.field("store_term_vectors", fieldType().storeTermVectors());
-        }
-        if (includeDefaults || fieldType().storeTermVectorOffsets() != Defaults.FIELD_TYPE.storeTermVectorOffsets()) {
-            builder.field("store_term_vector_offsets", fieldType().storeTermVectorOffsets());
-        }
-        if (includeDefaults ||
-            fieldType().storeTermVectorPositions() != Defaults.FIELD_TYPE.storeTermVectorPositions()) {
-            builder.field("store_term_vector_positions", fieldType().storeTermVectorPositions());
-        }
-        if (includeDefaults ||
-            fieldType().storeTermVectorPayloads() != Defaults.FIELD_TYPE.storeTermVectorPayloads()) {
-            builder.field("store_term_vector_payloads", fieldType().storeTermVectorPayloads());
-        }
-        if (includeDefaults || fieldType().omitNorms() != Defaults.FIELD_TYPE.omitNorms()) {
-            builder.field("norms", !fieldType().omitNorms());
-        }
-
-        doXContentAnalyzers(builder, includeDefaults);
-
-        if (fieldType().similarity() != null) {
-            builder.field("similarity", fieldType().similarity().name());
-        } else if (includeDefaults) {
-            builder.field("similarity", SimilarityService.DEFAULT_SIMILARITY);
-        }
-    }
-
-    @Override
-    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
-        if (((AllFieldMapper)mergeWith).enabled() != this.enabled() &&
-            ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
-            throw new IllegalArgumentException("mapper [" + fieldType().name() +
-                "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
-        }
-        super.doMerge(mergeWith, updateAllTypes);
-    }
-
-}
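The deleted `TypeParser.parse` above is also where 6.x rejected any attempt to configure `_all`, which is what makes deleting the field safe in 7.0: no 6.x-created index can have it enabled. A condensed, self-contained sketch of that gate (plain Java stand-ins for the mapper-parser types):

public class AllFieldGate {
    // Condensed from the deleted AllFieldMapper.TypeParser.parse: any
    // non-empty _all configuration on an index created on or after 6.0
    // was rejected outright.
    static void checkAllFieldAllowed(boolean mappingNodeIsEmpty, boolean createdOnOrAfter6) {
        if (!mappingNodeIsEmpty && createdOnOrAfter6) {
            throw new IllegalArgumentException("[_all] is disabled in 6.0. As a replacement, you can use an "
                    + "[copy_to] on mapping fields to create your own catch all field.");
        }
    }

    public static void main(String[] args) {
        checkAllFieldAllowed(true, true);   // no _all config on a 6.x index: fine
        checkAllFieldAllowed(false, true);  // throws, exactly as 6.x did
    }
}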
DateFieldMapper.java

@@ -128,7 +128,7 @@ public class DateFieldMapper extends FieldMapper {
         public DateFieldMapper build(BuilderContext context) {
             setupFieldType(context);
             return new DateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
-                    includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
+                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }
 

@@ -390,8 +390,6 @@ public class DateFieldMapper extends FieldMapper {
         }
     }
 
-    private Boolean includeInAll;
-
     private Explicit<Boolean> ignoreMalformed;
 
     private DateFieldMapper(

@@ -399,13 +397,11 @@ public class DateFieldMapper extends FieldMapper {
             MappedFieldType fieldType,
             MappedFieldType defaultFieldType,
             Explicit<Boolean> ignoreMalformed,
-            Boolean includeInAll,
             Settings indexSettings,
             MultiFields multiFields,
             CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         this.ignoreMalformed = ignoreMalformed;
-        this.includeInAll = includeInAll;
     }
 
     @Override

@@ -456,10 +452,6 @@ public class DateFieldMapper extends FieldMapper {
             }
         }
 
-        if (context.includeInAll(includeInAll, this)) {
-            context.allEntries().addText(fieldType().name(), dateAsString, fieldType().boost());
-        }
-
         if (fieldType().indexOptions() != IndexOptions.NONE) {
             fields.add(new LongPoint(fieldType().name(), timestamp));
         }

@@ -475,7 +467,6 @@ public class DateFieldMapper extends FieldMapper {
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         final DateFieldMapper other = (DateFieldMapper) mergeWith;
         super.doMerge(mergeWith, updateAllTypes);
-        this.includeInAll = other.includeInAll;
         if (other.ignoreMalformed.explicit()) {
             this.ignoreMalformed = other.ignoreMalformed;
         }

@@ -493,11 +484,6 @@ public class DateFieldMapper extends FieldMapper {
             builder.field("null_value", fieldType().nullValueAsString());
         }
 
-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", false);
-        }
         if (includeDefaults
                || fieldType().dateTimeFormatter().format().equals(DEFAULT_DATE_TIME_FORMATTER.format()) == false) {
             builder.field("format", fieldType().dateTimeFormatter().format());
DocumentMapper.java

@@ -221,10 +221,6 @@ public class DocumentMapper implements ToXContentFragment {
         return metadataMapper(SourceFieldMapper.class);
     }
 
-    public AllFieldMapper allFieldMapper() {
-        return metadataMapper(AllFieldMapper.class);
-    }
-
     public IdFieldMapper idFieldMapper() {
         return metadataMapper(IdFieldMapper.class);
     }
DocumentParser.java

@@ -353,12 +353,6 @@ final class DocumentParser {
             context = nestedContext(context, mapper);
         }
 
-        // update the default value of include_in_all if necessary
-        Boolean includeInAll = mapper.includeInAll();
-        if (includeInAll != null) {
-            context = context.setIncludeInAllDefault(includeInAll);
-        }
-
         // if we are at the end of the previous object, advance
         if (token == XContentParser.Token.END_OBJECT) {
             token = parser.nextToken();
FieldMapper.java

@@ -57,7 +57,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         protected final MappedFieldType defaultFieldType;
         private final IndexOptions defaultOptions;
         protected boolean omitNormsSet = false;
-        protected Boolean includeInAll;
         protected boolean indexOptionsSet = false;
         protected boolean docValuesSet = false;
         protected final MultiFields.Builder multiFieldsBuilder;

@@ -182,11 +181,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             return builder;
         }
 
-        public T includeInAll(Boolean includeInAll) {
-            this.includeInAll = includeInAll;
-            return builder;
-        }
-
         public T similarity(SimilarityProvider similarity) {
             this.fieldType.setSimilarity(similarity);
             return builder;
GeoPointFieldMapper.java

@@ -187,9 +187,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         }
     }
 
-    protected void parse(ParseContext originalContext, GeoPoint point) throws IOException {
-        // Geopoint fields, by default, will not be included in _all
-        final ParseContext context = originalContext.setIncludeInAllDefault(false);
+    protected void parse(ParseContext context, GeoPoint point) throws IOException {
 
         if (ignoreMalformed.value() == false) {
             if (point.lat() > 90.0 || point.lat() < -90.0) {
IpFieldMapper.java

@@ -86,7 +86,7 @@ public class IpFieldMapper extends FieldMapper {
         public IpFieldMapper build(BuilderContext context) {
             setupFieldType(context);
             return new IpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
-                    includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
+                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }
 

@@ -302,8 +302,6 @@ public class IpFieldMapper extends FieldMapper {
         }
     }
 
-    private Boolean includeInAll;
-
     private Explicit<Boolean> ignoreMalformed;
 
     private IpFieldMapper(

@@ -311,13 +309,11 @@ public class IpFieldMapper extends FieldMapper {
             MappedFieldType fieldType,
             MappedFieldType defaultFieldType,
             Explicit<Boolean> ignoreMalformed,
-            Boolean includeInAll,
             Settings indexSettings,
             MultiFields multiFields,
             CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         this.ignoreMalformed = ignoreMalformed;
-        this.includeInAll = includeInAll;
     }
 
     @Override

@@ -368,10 +364,6 @@ public class IpFieldMapper extends FieldMapper {
             }
         }
 
-        if (context.includeInAll(includeInAll, this)) {
-            context.allEntries().addText(fieldType().name(), addressAsString, fieldType().boost());
-        }
-
         if (fieldType().indexOptions() != IndexOptions.NONE) {
             fields.add(new InetAddressPoint(fieldType().name(), address));
         }

@@ -387,7 +379,6 @@ public class IpFieldMapper extends FieldMapper {
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         super.doMerge(mergeWith, updateAllTypes);
         IpFieldMapper other = (IpFieldMapper) mergeWith;
-        this.includeInAll = other.includeInAll;
         if (other.ignoreMalformed.explicit()) {
             this.ignoreMalformed = other.ignoreMalformed;
         }

@@ -408,10 +399,5 @@ public class IpFieldMapper extends FieldMapper {
         if (includeDefaults || ignoreMalformed.explicit()) {
             builder.field("ignore_malformed", ignoreMalformed.value());
         }
-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", false);
-        }
     }
 }
KeywordFieldMapper.java

@@ -112,7 +112,7 @@ public final class KeywordFieldMapper extends FieldMapper {
         public KeywordFieldMapper build(BuilderContext context) {
             setupFieldType(context);
             return new KeywordFieldMapper(
-                    name, fieldType, defaultFieldType, ignoreAbove, includeInAll,
+                    name, fieldType, defaultFieldType, ignoreAbove,
                     context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }

@@ -255,16 +255,13 @@ public final class KeywordFieldMapper extends FieldMapper {
         }
     }
 
-    private Boolean includeInAll;
     private int ignoreAbove;
 
     protected KeywordFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
-                                 int ignoreAbove, Boolean includeInAll,
-                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
+                                 int ignoreAbove, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;
         this.ignoreAbove = ignoreAbove;
-        this.includeInAll = includeInAll;
     }
 
    /** Values that have more chars than the return value of this method will

@@ -284,11 +281,6 @@ public final class KeywordFieldMapper extends FieldMapper {
         return (KeywordFieldType) super.fieldType();
     }
 
-    // pkg-private for testing
-    Boolean includeInAll() {
-        return includeInAll;
-    }
-
     @Override
     protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
         String value;

@@ -328,10 +320,6 @@ public final class KeywordFieldMapper extends FieldMapper {
             }
         }
 
-        if (context.includeInAll(includeInAll, this)) {
-            context.allEntries().addText(fieldType().name(), value, fieldType().boost());
-        }
-
         // convert to utf8 only once before feeding postings/dv/stored fields
         final BytesRef binaryValue = new BytesRef(value);
         if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {

@@ -351,7 +339,6 @@ public final class KeywordFieldMapper extends FieldMapper {
     @Override
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         super.doMerge(mergeWith, updateAllTypes);
-        this.includeInAll = ((KeywordFieldMapper) mergeWith).includeInAll;
         this.ignoreAbove = ((KeywordFieldMapper) mergeWith).ignoreAbove;
     }
 

@@ -363,12 +350,6 @@ public final class KeywordFieldMapper extends FieldMapper {
             builder.field("null_value", fieldType().nullValue());
         }
 
-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", true);
-        }
-
         if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
             builder.field("ignore_above", ignoreAbove);
         }
MappedFieldType.java

@@ -36,7 +36,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.joda.DateMathParser;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.IndexFieldData;

@@ -293,7 +292,7 @@ public abstract class MappedFieldType extends FieldType {
         return nullValue;
     }
 
-    /** Returns the null value stringified, so it can be used for e.g. _all field, or null if there is no null value */
+    /** Returns the null value stringified or null if there is no null value */
     public String nullValueAsString() {
         return nullValueAsString;
     }

@@ -449,9 +448,7 @@ public abstract class MappedFieldType extends FieldType {
         while (termQuery instanceof BoostQuery) {
             termQuery = ((BoostQuery) termQuery).getQuery();
         }
-        if (termQuery instanceof AllTermQuery) {
-            return ((AllTermQuery) termQuery).getTerm();
-        } else if (termQuery instanceof TypeFieldMapper.TypesQuery) {
+        if (termQuery instanceof TypeFieldMapper.TypesQuery) {
             assert ((TypeFieldMapper.TypesQuery) termQuery).getTerms().length == 1;
             return new Term(TypeFieldMapper.NAME, ((TypeFieldMapper.TypesQuery) termQuery).getTerms()[0]);
         }
MapperService.java

@@ -102,7 +102,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.Dynamic, Property.IndexScope);
 
     private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(
-            "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
+            "_uid", "_id", "_type", "_parent", "_routing", "_index",
             "_size", "_timestamp", "_ttl"
     );
 

@@ -122,7 +122,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     private volatile FieldTypeLookup fieldTypes;
     private volatile Map<String, ObjectMapper> fullPathObjectMappers = emptyMap();
     private boolean hasNested = false; // updated dynamically to true when a nested object is added
-    private boolean allEnabled = false; // updated dynamically to true when _all is enabled
 
     private final DocumentMapperParser documentParser;
 

@@ -163,13 +162,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         return this.hasNested;
     }
 
-    /**
-     * Returns true if the "_all" field is enabled on any type.
-     */
-    public boolean allEnabled() {
-        return this.allEnabled;
-    }
-
     /**
      * returns an immutable iterator over current document mappers.
      *

@@ -346,7 +338,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     private synchronized Map<String, DocumentMapper> internalMerge(@Nullable DocumentMapper defaultMapper, @Nullable String defaultMappingSource,
             List<DocumentMapper> documentMappers, MergeReason reason, boolean updateAllTypes) {
         boolean hasNested = this.hasNested;
-        boolean allEnabled = this.allEnabled;
         Map<String, ObjectMapper> fullPathObjectMappers = this.fullPathObjectMappers;
         FieldTypeLookup fieldTypes = this.fieldTypes;
         Set<String> parentTypes = this.parentTypes;

@@ -444,10 +435,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                 parentTypes.add(mapper.parentFieldMapper().type());
             }
 
-            // this is only correct because types cannot be removed and we do not
-            // allow to disable an existing _all field
-            allEnabled |= mapper.allFieldMapper().enabled();
-
             results.put(newMapper.type(), newMapper);
             mappers.put(newMapper.type(), newMapper);
         }

@@ -510,7 +497,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         this.hasNested = hasNested;
         this.fullPathObjectMappers = fullPathObjectMappers;
         this.parentTypes = parentTypes;
-        this.allEnabled = allEnabled;
 
         assert assertMappersShareSameFieldType();
         assert results.values().stream().allMatch(this::assertSerialization);
NumberFieldMapper.java

@@ -121,7 +121,7 @@ public class NumberFieldMapper extends FieldMapper {
         public NumberFieldMapper build(BuilderContext context) {
             setupFieldType(context);
             return new NumberFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
-                    coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
+                    coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }
 

@@ -921,8 +921,6 @@ public class NumberFieldMapper extends FieldMapper {
         }
     }
 
-    private Boolean includeInAll;
-
     private Explicit<Boolean> ignoreMalformed;
 
     private Explicit<Boolean> coerce;

@@ -933,14 +931,12 @@ public class NumberFieldMapper extends FieldMapper {
             MappedFieldType defaultFieldType,
             Explicit<Boolean> ignoreMalformed,
             Explicit<Boolean> coerce,
-            Boolean includeInAll,
             Settings indexSettings,
             MultiFields multiFields,
             CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         this.ignoreMalformed = ignoreMalformed;
         this.coerce = coerce;
-        this.includeInAll = includeInAll;
     }
 
     @Override

@@ -960,7 +956,6 @@ public class NumberFieldMapper extends FieldMapper {
 
     @Override
     protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
-        final boolean includeInAll = context.includeInAll(this.includeInAll, this);
 
         XContentParser parser = context.parser();
         Object value;

@@ -983,12 +978,8 @@ public class NumberFieldMapper extends FieldMapper {
                     throw e;
                 }
             }
-            if (includeInAll) {
-                value = parser.textOrNull(); // preserve formatting
-            } else {
-                value = numericValue;
-            }
+            value = numericValue;
         }
 
         if (value == null) {
             value = fieldType().nullValue();

@@ -1002,10 +993,6 @@ public class NumberFieldMapper extends FieldMapper {
             numericValue = fieldType().type.parse(value, coerce.value());
         }
 
-        if (includeInAll) {
-            context.allEntries().addText(fieldType().name(), value.toString(), fieldType().boost());
-        }
-
         boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
         boolean docValued = fieldType().hasDocValues();
         boolean stored = fieldType().stored();

@@ -1016,7 +1003,6 @@ public class NumberFieldMapper extends FieldMapper {
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         super.doMerge(mergeWith, updateAllTypes);
         NumberFieldMapper other = (NumberFieldMapper) mergeWith;
-        this.includeInAll = other.includeInAll;
         if (other.ignoreMalformed.explicit()) {
             this.ignoreMalformed = other.ignoreMalformed;
         }

@@ -1039,11 +1025,5 @@ public class NumberFieldMapper extends FieldMapper {
         if (includeDefaults || fieldType().nullValue() != null) {
             builder.field("null_value", fieldType().nullValue());
         }
-
-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", false);
-        }
     }
 }
ObjectMapper.java

@@ -24,12 +24,14 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.collect.CopyOnWriteHashMap;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.fielddata.ScriptDocValues;
 
 import java.io.IOException;
 import java.util.ArrayList;

@@ -43,6 +45,7 @@ import java.util.Locale;
 import java.util.Map;
 
 public class ObjectMapper extends Mapper implements Cloneable {
+    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(ObjectMapper.class));
 
     public static final String CONTENT_TYPE = "object";
     public static final String NESTED_CONTENT_TYPE = "nested";

@@ -100,8 +103,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
 
         protected Dynamic dynamic = Defaults.DYNAMIC;
 
-        protected Boolean includeInAll;
-
         protected final List<Mapper.Builder> mappersBuilders = new ArrayList<>();
 
         public Builder(String name) {

@@ -124,11 +125,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
             return builder;
         }
 
-        public T includeInAll(boolean includeInAll) {
-            this.includeInAll = includeInAll;
-            return builder;
-        }
-
         public T add(Mapper.Builder builder) {
             mappersBuilders.add(builder);
             return this.builder;

@@ -150,14 +146,14 @@ public class ObjectMapper extends Mapper implements Cloneable {
             context.path().remove();
 
             ObjectMapper objectMapper = createMapper(name, context.path().pathAsText(name), enabled, nested, dynamic,
-                    includeInAll, mappers, context.indexSettings());
+                    mappers, context.indexSettings());
 
             return (Y) objectMapper;
         }
 
         protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic,
-                Boolean includeInAll, Map<String, Mapper> mappers, @Nullable Settings settings) {
-            return new ObjectMapper(name, fullPath, enabled, nested, dynamic, includeInAll, mappers, settings);
+                Map<String, Mapper> mappers, @Nullable Settings settings) {
+            return new ObjectMapper(name, fullPath, enabled, nested, dynamic, mappers, settings);
         }
     }
 

@@ -200,7 +196,7 @@ public class ObjectMapper extends Mapper implements Cloneable {
                 }
                 return true;
             } else if (fieldName.equals("include_in_all")) {
-                builder.includeInAll(TypeParsers.nodeBooleanValue(fieldName, "include_in_all", fieldNode, parserContext));
+                deprecationLogger.deprecated("[include_in_all] is deprecated, the _all field have been removed in this version");
                 return true;
             }
             return false;

@@ -313,12 +309,10 @@ public class ObjectMapper extends Mapper implements Cloneable {
 
     private volatile Dynamic dynamic;
 
-    private Boolean includeInAll;
-
     private volatile CopyOnWriteHashMap<String, Mapper> mappers;
 
     ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic,
-            Boolean includeInAll, Map<String, Mapper> mappers, Settings settings) {
+            Map<String, Mapper> mappers, Settings settings) {
         super(name);
         assert settings != null;
         if (name.isEmpty()) {

@@ -328,7 +322,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
         this.enabled = enabled;
         this.nested = nested;
         this.dynamic = dynamic;
-        this.includeInAll = includeInAll;
         if (mappers == null) {
             this.mappers = new CopyOnWriteHashMap<>();
         } else {

@@ -378,10 +371,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
         return this.nested;
     }
 
-    public Boolean includeInAll() {
-        return includeInAll;
-    }
-
     public Query nestedTypeFilter() {
         return this.nestedTypeFilter;
     }

@@ -429,7 +418,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
             }
         }
 
-        this.includeInAll = mergeWith.includeInAll;
         if (mergeWith.dynamic != null) {
             this.dynamic = mergeWith.dynamic;
         }

@@ -495,9 +483,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
         if (enabled != Defaults.ENABLED) {
             builder.field("enabled", enabled);
         }
-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        }
 
         if (custom != null) {
             custom.toXContent(builder, params);
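Note the behavioural subtlety in the `ObjectMapper` hunk above: `include_in_all` is still accepted in mappings, but it now only emits a deprecation warning instead of configuring anything. A minimal sketch of that accept-and-warn pattern (class name hypothetical; the logger types are the ones the diff itself imports):

import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;

public class IncludeInAllDeprecation {
    private static final DeprecationLogger deprecationLogger =
            new DeprecationLogger(ESLoggerFactory.getLogger(IncludeInAllDeprecation.class));

    // Accept-and-warn: the mapping parameter still parses successfully,
    // so old mappings keep working, but the value has no effect.
    public static void handleIncludeInAll() {
        deprecationLogger.deprecated("[include_in_all] is deprecated, the _all field have been removed in this version");
    }
}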
@@ -22,11 +22,9 @@ package org.elasticsearch.index.mapper;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.ObjectObjectMap;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.lucene.all.AllEntries;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -263,11 +261,6 @@ public abstract class ParseContext {
             in.seqID(seqID);
         }

-        @Override
-        public AllEntries allEntries() {
-            return in.allEntries();
-        }
-
         @Override
         public boolean externalValueSet() {
             return in.externalValueSet();

@@ -312,7 +305,6 @@ public abstract class ParseContext {

         private SeqNoFieldMapper.SequenceIDFields seqID;

-        private final AllEntries allEntries;

         private final List<Mapper> dynamicMappers;

@@ -328,7 +320,6 @@ public abstract class ParseContext {
             this.documents.add(document);
             this.version = null;
             this.sourceToParse = source;
-            this.allEntries = new AllEntries();
             this.dynamicMappers = new ArrayList<>();
         }

@@ -413,11 +404,6 @@ public abstract class ParseContext {
             this.seqID = seqID;
         }

-        @Override
-        public AllEntries allEntries() {
-            return this.allEntries;
-        }
-
         @Override
         public void addDynamicMapper(Mapper mapper) {
             dynamicMappers.add(mapper);

@@ -431,22 +417,6 @@ public abstract class ParseContext {

     public abstract DocumentMapperParser docMapperParser();

-    /** Return a view of this {@link ParseContext} that changes the return
-     *  value of {@link #getIncludeInAllDefault()}. */
-    public final ParseContext setIncludeInAllDefault(boolean includeInAll) {
-        return new FilterParseContext(this) {
-            @Override
-            public Boolean getIncludeInAllDefault() {
-                return includeInAll;
-            }
-        };
-    }
-
-    /** Whether field values should be added to the _all field by default. */
-    public Boolean getIncludeInAllDefault() {
-        return null;
-    }
-
     /**
      * Return a new context that will be within a copy-to operation.
      */

@@ -543,37 +513,6 @@ public abstract class ParseContext {

     public abstract void seqID(SeqNoFieldMapper.SequenceIDFields seqID);

-    public final boolean includeInAll(Boolean includeInAll, FieldMapper mapper) {
-        return includeInAll(includeInAll, mapper.fieldType().indexOptions() != IndexOptions.NONE);
-    }
-
-    /**
-     * Is all included or not. Will always disable it if {@link org.elasticsearch.index.mapper.AllFieldMapper#enabled()}
-     * is <tt>false</tt>. If its enabled, then will return <tt>true</tt> only if the specific flag is <tt>null</tt> or
-     * its actual value (so, if not set, defaults to "true") and the field is indexed.
-     */
-    private boolean includeInAll(Boolean includeInAll, boolean indexed) {
-        if (isWithinCopyTo()) {
-            return false;
-        }
-        if (isWithinMultiFields()) {
-            return false;
-        }
-        if (!docMapper().allFieldMapper().enabled()) {
-            return false;
-        }
-        if (includeInAll == null) {
-            includeInAll = getIncludeInAllDefault();
-        }
-        // not explicitly set
-        if (includeInAll == null) {
-            return indexed;
-        }
-        return includeInAll;
-    }
-
-    public abstract AllEntries allEntries();
-
     /**
      * Return a new context that will have the external value set.
      */

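For reference, the includeInAll decision logic deleted above reduced to a small precedence chain. A standalone restatement (method and parameter names are illustrative, not ES API):

    // Restatement of the removed ParseContext#includeInAll precedence rules.
    final class IncludeInAllRules {
        static boolean resolve(Boolean explicitFlag, Boolean contextDefault, boolean allEnabled,
                               boolean withinCopyTo, boolean withinMultiFields, boolean indexed) {
            if (withinCopyTo || withinMultiFields) {
                return false; // copy_to targets and multi fields never fed _all
            }
            if (allEnabled == false) {
                return false; // a disabled _all mapper overrode everything
            }
            Boolean flag = explicitFlag != null ? explicitFlag : contextDefault;
            // not explicitly set anywhere: follow whether the field is indexed
            return flag != null ? flag : indexed;
        }
    }
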
@@ -152,7 +152,7 @@ public class RangeFieldMapper extends FieldMapper {
         @Override
         public RangeFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            return new RangeFieldMapper(name, fieldType, defaultFieldType, coerce(context), includeInAll,
+            return new RangeFieldMapper(name, fieldType, defaultFieldType, coerce(context),
                 context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }

@@ -309,7 +309,6 @@ public class RangeFieldMapper extends FieldMapper {
         }
     }

-    private Boolean includeInAll;
     private Explicit<Boolean> coerce;

     private RangeFieldMapper(

@@ -317,13 +316,11 @@ public class RangeFieldMapper extends FieldMapper {
             MappedFieldType fieldType,
             MappedFieldType defaultFieldType,
             Explicit<Boolean> coerce,
-            Boolean includeInAll,
             Settings indexSettings,
             MultiFields multiFields,
             CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         this.coerce = coerce;
-        this.includeInAll = includeInAll;
     }

     @Override

@@ -343,7 +340,6 @@ public class RangeFieldMapper extends FieldMapper {

     @Override
     protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
-        final boolean includeInAll = context.includeInAll(this.includeInAll, this);
         Range range;
         if (context.externalValueSet()) {
             range = context.parseExternalValue(Range.class);

@@ -394,9 +390,6 @@ public class RangeFieldMapper extends FieldMapper {
                     + name() + "], expected an object but got " + parser.currentName());
             }
         }
-        if (includeInAll) {
-            context.allEntries().addText(fieldType.name(), range.toString(), fieldType.boost());
-        }
         boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
         boolean docValued = fieldType.hasDocValues();
         boolean stored = fieldType.stored();

@@ -407,7 +400,6 @@ public class RangeFieldMapper extends FieldMapper {
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         super.doMerge(mergeWith, updateAllTypes);
         RangeFieldMapper other = (RangeFieldMapper) mergeWith;
-        this.includeInAll = other.includeInAll;
         if (other.coerce.explicit()) {
             this.coerce = other.coerce;
         }

@@ -430,11 +422,6 @@ public class RangeFieldMapper extends FieldMapper {
         if (includeDefaults || coerce.explicit()) {
             builder.field("coerce", coerce.value());
         }
-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", false);
-        }
     }

     /** Enum defining the type of range */

@@ -76,9 +76,9 @@ public class RootObjectMapper extends ObjectMapper {

     @Override
     protected ObjectMapper createMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic,
-            Boolean includeInAll, Map<String, Mapper> mappers, @Nullable Settings settings) {
+            Map<String, Mapper> mappers, @Nullable Settings settings) {
         assert !nested.isNested();
-        return new RootObjectMapper(name, enabled, dynamic, includeInAll, mappers,
+        return new RootObjectMapper(name, enabled, dynamic, mappers,
             dynamicDateTimeFormatters,
             dynamicTemplates,
             dateDetection, numericDetection, settings);

@@ -165,10 +165,10 @@ public class RootObjectMapper extends ObjectMapper {
     private Explicit<Boolean> numericDetection;
     private Explicit<DynamicTemplate[]> dynamicTemplates;

-    RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Boolean includeInAll, Map<String, Mapper> mappers,
+    RootObjectMapper(String name, boolean enabled, Dynamic dynamic, Map<String, Mapper> mappers,
                      Explicit<FormatDateTimeFormatter[]> dynamicDateTimeFormatters, Explicit<DynamicTemplate[]> dynamicTemplates,
                      Explicit<Boolean> dateDetection, Explicit<Boolean> numericDetection, Settings settings) {
-        super(name, name, enabled, Nested.NO, dynamic, includeInAll, mappers, settings);
+        super(name, name, enabled, Nested.NO, dynamic, mappers, settings);
         this.dynamicTemplates = dynamicTemplates;
         this.dynamicDateTimeFormatters = dynamicDateTimeFormatters;
         this.dateDetection = dateDetection;

@@ -126,7 +126,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
         }
         setupFieldType(context);
         return new ScaledFloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context),
-            coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
+            coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }

@@ -309,8 +309,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
         }
     }

-    private Boolean includeInAll;
-
     private Explicit<Boolean> ignoreMalformed;

     private Explicit<Boolean> coerce;

@@ -321,7 +319,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
             MappedFieldType defaultFieldType,
             Explicit<Boolean> ignoreMalformed,
             Explicit<Boolean> coerce,
-            Boolean includeInAll,
             Settings indexSettings,
             MultiFields multiFields,
             CopyTo copyTo) {

@@ -332,7 +329,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
         }
         this.ignoreMalformed = ignoreMalformed;
         this.coerce = coerce;
-        this.includeInAll = includeInAll;
     }

     @Override

@@ -352,7 +348,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {

     @Override
     protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
-        final boolean includeInAll = context.includeInAll(this.includeInAll, this);

         XContentParser parser = context.parser();
         Object value;

@@ -375,12 +370,8 @@ public class ScaledFloatFieldMapper extends FieldMapper {
                     throw e;
                 }
             }
-            if (includeInAll) {
-                value = parser.textOrNull(); // preserve formatting
-            } else {
-                value = numericValue;
-            }
+            value = numericValue;
         }

         if (value == null) {
             value = fieldType().nullValue();

@@ -394,10 +385,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
             numericValue = parse(value);
         }

-        if (includeInAll) {
-            context.allEntries().addText(fieldType().name(), value.toString(), fieldType().boost());
-        }
-
         double doubleValue = numericValue.doubleValue();
         if (Double.isFinite(doubleValue) == false) {
             if (ignoreMalformed.value()) {

@@ -419,7 +406,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         super.doMerge(mergeWith, updateAllTypes);
         ScaledFloatFieldMapper other = (ScaledFloatFieldMapper) mergeWith;
-        this.includeInAll = other.includeInAll;
         if (other.ignoreMalformed.explicit()) {
             this.ignoreMalformed = other.ignoreMalformed;
         }

@@ -444,12 +430,6 @@ public class ScaledFloatFieldMapper extends FieldMapper {
         if (includeDefaults || fieldType().nullValue() != null) {
             builder.field("null_value", fieldType().nullValue());
         }

-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", false);
-        }
     }

     static Double parse(Object value) {

@@ -122,7 +122,7 @@ public class TextFieldMapper extends FieldMapper {
         }
         setupFieldType(context);
         return new TextFieldMapper(
-                name, fieldType, defaultFieldType, positionIncrementGap, includeInAll,
+                name, fieldType, defaultFieldType, positionIncrementGap,
                 context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }

@@ -293,11 +293,10 @@ public class TextFieldMapper extends FieldMapper {
         }
     }

-    private Boolean includeInAll;
     private int positionIncrementGap;

     protected TextFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
-                                int positionIncrementGap, Boolean includeInAll,
+                                int positionIncrementGap,
                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         assert fieldType.tokenized();

@@ -306,7 +305,6 @@ public class TextFieldMapper extends FieldMapper {
             throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + name() + "]");
         }
         this.positionIncrementGap = positionIncrementGap;
-        this.includeInAll = includeInAll;
     }

     @Override

@@ -314,11 +312,6 @@ public class TextFieldMapper extends FieldMapper {
         return (TextFieldMapper) super.clone();
     }

-    // pkg-private for testing
-    Boolean includeInAll() {
-        return includeInAll;
-    }
-
     public int getPositionIncrementGap() {
         return this.positionIncrementGap;
     }

@@ -336,10 +329,6 @@ public class TextFieldMapper extends FieldMapper {
             return;
         }

-        if (context.includeInAll(includeInAll, this)) {
-            context.allEntries().addText(fieldType().name(), value, fieldType().boost());
-        }
-
         if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
             Field field = new Field(fieldType().name(), value, fieldType());
             fields.add(field);

@@ -354,7 +343,6 @@ public class TextFieldMapper extends FieldMapper {
     @Override
     protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
         super.doMerge(mergeWith, updateAllTypes);
-        this.includeInAll = ((TextFieldMapper) mergeWith).includeInAll;
     }

     @Override

@@ -367,12 +355,6 @@ public class TextFieldMapper extends FieldMapper {
         super.doXContentBody(builder, includeDefaults, params);
         doXContentAnalyzers(builder, includeDefaults);

-        if (includeInAll != null) {
-            builder.field("include_in_all", includeInAll);
-        } else if (includeDefaults) {
-            builder.field("include_in_all", true);
-        }
-
         if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
             builder.field("position_increment_gap", positionIncrementGap);
         }

@@ -211,18 +211,6 @@ public class TypeParsers {
         } else if (propName.equals("index_options")) {
             builder.indexOptions(nodeIndexOptionValue(propNode));
             iterator.remove();
-        } else if (propName.equals("include_in_all")) {
-            if (parserContext.isWithinMultiField()) {
-                throw new MapperParsingException("include_in_all in multi fields is not allowed. Found the include_in_all in field ["
-                    + name + "] which is within a multi field.");
-            } else if (parserContext.indexVersionCreated().onOrAfter(Version.V_6_0_0_alpha1)) {
-                throw new MapperParsingException("[include_in_all] is not allowed for indices created on or after version 6.0.0 as " +
-                    "[_all] is deprecated. As a replacement, you can use an [copy_to] on mapping fields to create your " +
-                    "own catch all field.");
-            } else {
-                builder.includeInAll(nodeBooleanValue(name, "include_in_all", propNode, parserContext));
-            }
-            iterator.remove();
         } else if (propName.equals("similarity")) {
             SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
             builder.similarity(similarityProvider);

@@ -488,7 +488,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQueryBuilder> {
     }

     /**
-     * Constructs a new more like this query which uses the "_all" field.
+     * Constructs a new more like this query which uses the default search field.
      * @param likeTexts the text to use when generating the 'More Like This' query.
      * @param likeItems the documents to use when generating the 'More Like This' query.
      */

@@ -34,7 +34,6 @@ import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.query.support.QueryParsers;
 import org.elasticsearch.index.search.QueryParserHelper;
 import org.elasticsearch.index.search.QueryStringQueryParser;

@@ -933,11 +932,6 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQueryBuilder> {
             queryParser = new QueryStringQueryParser(context, resolvedFields, isLenient);
         } else {
             List<String> defaultFields = context.defaultFields();
-            if (context.getMapperService().allEnabled() == false &&
-                    defaultFields.size() == 1 && AllFieldMapper.NAME.equals(defaultFields.get(0))) {
-                // For indices created before 6.0 with _all disabled
-                defaultFields = Collections.singletonList("*");
-            }
             boolean isAllField = defaultFields.size() == 1 && Regex.isMatchAllPattern(defaultFields.get(0));
             if (isAllField) {
                 queryParser = new QueryStringQueryParser(context, lenient == null ? true : lenient);

@@ -31,7 +31,6 @@ import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.search.QueryParserHelper;
 import org.elasticsearch.index.search.SimpleQueryStringQueryParser;
 import org.elasticsearch.index.search.SimpleQueryStringQueryParser.Settings;

@@ -405,11 +404,6 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQueryStringBuilder> {
             resolvedFieldsAndWeights = QueryParserHelper.resolveMappingFields(context, fieldsAndWeights);
         } else {
             List<String> defaultFields = context.defaultFields();
-            if (context.getMapperService().allEnabled() == false &&
-                    defaultFields.size() == 1 && AllFieldMapper.NAME.equals(defaultFields.get(0))) {
-                // For indices created before 6.0 with _all disabled
-                defaultFields = Collections.singletonList("*");
-            }
             boolean isAllField = defaultFields.size() == 1 && Regex.isMatchAllPattern(defaultFields.get(0));
             if (isAllField) {
                 newSettings.lenient(lenientSet ? settings.lenient() : true);

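After this change both query builders derive the field list purely from index.query.default_field; the special pre-6.0 _all fallback shown removed above is gone. The remaining all-fields check, sketched with the real Regex.isMatchAllPattern helper and an illustrative value:

    import java.util.Collections;
    import java.util.List;
    import org.elasticsearch.common.regex.Regex;

    class DefaultFieldSketch {
        static boolean isAllFieldQuery() {
            // Illustrative value; the builders read this from context.defaultFields(),
            // i.e. the index.query.default_field setting.
            List<String> defaultFields = Collections.singletonList("*");
            return defaultFields.size() == 1 && Regex.isMatchAllPattern(defaultFields.get(0));
        }
    }
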
@@ -48,7 +48,6 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.unit.Fuzziness;

@@ -418,9 +417,6 @@ public class MatchQuery {
             } else if (innerQuery instanceof TermQuery) {
                 prefixQuery.add(((TermQuery) innerQuery).getTerm());
                 return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
-            } else if (innerQuery instanceof AllTermQuery) {
-                prefixQuery.add(((AllTermQuery) innerQuery).getTerm());
-                return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost);
             }
             return query;
         }

@@ -43,11 +43,9 @@ import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.common.lucene.all.AllField;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.unit.Fuzziness;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;

@@ -613,11 +611,7 @@ public class QueryStringQueryParser extends XQueryParser {
     protected Query getWildcardQuery(String field, String termStr) throws ParseException {
         String actualField = field != null ? field : this.field;
         if (termStr.equals("*") && actualField != null) {
-            /**
-             * We rewrite _all:* to a match all query.
-             * TODO: We can remove this special case when _all is completely removed.
-             */
-            if (Regex.isMatchAllPattern(actualField) || AllFieldMapper.NAME.equals(actualField)) {
+            if (Regex.isMatchAllPattern(actualField)) {
                 return newMatchAllDocsQuery();
             }
             // effectively, we check if a field exists or not

@@ -627,8 +621,6 @@ public class QueryStringQueryParser extends XQueryParser {
         Map<String, Float> fields = extractMultiFields(field, false);
         if (fields.isEmpty()) {
             return newUnmappedFieldQuery(termStr);
-        } else if (fields.containsKey(AllFieldMapper.NAME)) {
-            return newMatchAllDocsQuery();
         }
         List<Query> queries = new ArrayList<>();
         for (Map.Entry<String, Float> entry : fields.entrySet()) {

@@ -27,7 +27,6 @@ import org.elasticsearch.index.shard.PrimaryReplicaSyncer;
 import org.elasticsearch.common.geo.ShapesAvailability;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.mapper.BinaryFieldMapper;
 import org.elasticsearch.index.mapper.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.CompletionFieldMapper;

@@ -138,7 +137,6 @@ public class IndicesModule extends AbstractModule {
         metadataMappers.put(IndexFieldMapper.NAME, new IndexFieldMapper.TypeParser());
         metadataMappers.put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser());
         metadataMappers.put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser());
-        metadataMappers.put(AllFieldMapper.NAME, new AllFieldMapper.TypeParser());
         metadataMappers.put(VersionFieldMapper.NAME, new VersionFieldMapper.TypeParser());
         metadataMappers.put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser());
         metadataMappers.put(SeqNoFieldMapper.NAME, new SeqNoFieldMapper.TypeParser());

@@ -43,7 +43,6 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.highlight.DefaultEncoder;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.test.ESTestCase;

@@ -148,18 +147,6 @@ public class CustomUnifiedHighlighterTests extends ESTestCase {
             BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
     }

-    public void testAllTermQuery() throws Exception {
-        final String[] inputs = {
-            "The quick brown fox."
-        };
-        final String[] outputs = {
-            "The quick brown <b>fox</b>."
-        };
-        AllTermQuery query = new AllTermQuery(new Term("text", "fox"));
-        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
-            BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
-    }
-
     public void testCommonTermsQuery() throws Exception {
         final String[] inputs = {
             "The quick brown fox."

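The deleted testAllTermQuery scenario stays covered in spirit by the plain term-query path; re-expressed as a sketch against the same helper (assertHighlightOneDoc, as used by the surrounding tests; the test name is hypothetical):

    public void testTermQueryHighlight() throws Exception { // hypothetical name
        final String[] inputs = { "The quick brown fox." };
        final String[] outputs = { "The quick brown <b>fox</b>." };
        Query query = new TermQuery(new Term("text", "fox"));
        assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT,
            BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs);
    }
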
@@ -36,7 +36,6 @@ import org.elasticsearch.index.analysis.CharFilterFactory;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
 import org.elasticsearch.index.analysis.TokenFilterFactory;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.indices.analysis.AnalysisModule;
 import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
 import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter;

@@ -127,7 +126,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         AnalyzeRequest request = new AnalyzeRequest();
         request.text("the quick brown fox");
         request.analyzer("standard");
-        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, null, registry, environment);
+        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment);
         List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
         assertEquals(4, tokens.size());

@@ -136,7 +135,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         request.text("the qu1ck brown fox");
         request.tokenizer("standard");
         request.addTokenFilter("mock");
-        analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment);
+        analyze = TransportAnalyzeAction.analyze(request, "text", null, randomBoolean() ? indexAnalyzers : null, registry, environment);
         tokens = analyze.getTokens();
         assertEquals(3, tokens.size());
         assertEquals("qu1ck", tokens.get(0).getTerm());

@@ -148,7 +147,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         request.text("the qu1ck brown fox");
         request.tokenizer("standard");
         request.addCharFilter("append_foo");
-        analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment);
+        analyze = TransportAnalyzeAction.analyze(request, "text", null, randomBoolean() ? indexAnalyzers : null, registry, environment);
         tokens = analyze.getTokens();
         assertEquals(4, tokens.size());
         assertEquals("the", tokens.get(0).getTerm());

@@ -162,7 +161,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         request.tokenizer("standard");
         request.addCharFilter("append");
         request.text("the qu1ck brown fox");
-        analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment);
+        analyze = TransportAnalyzeAction.analyze(request, "text", null, randomBoolean() ? indexAnalyzers : null, registry, environment);
         tokens = analyze.getTokens();
         assertEquals(4, tokens.size());
         assertEquals("the", tokens.get(0).getTerm());

@@ -175,7 +174,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         AnalyzeRequest request = new AnalyzeRequest();
         request.analyzer("standard");
         request.text("the 1 brown fox");
-        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, null, registry, environment);
+        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment);
         List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
         assertEquals(4, tokens.size());
         assertEquals("the", tokens.get(0).getTerm());

@@ -207,7 +206,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         AnalyzeRequest request = new AnalyzeRequest();
         request.text("the quick brown fox");
         request.analyzer("custom_analyzer");
-        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
+        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment);
         List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
         assertEquals(3, tokens.size());
         assertEquals("quick", tokens.get(0).getTerm());

@@ -215,7 +214,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         assertEquals("fox", tokens.get(2).getTerm());

         request.analyzer("standard");
-        analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
+        analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment);
         tokens = analyze.getTokens();
         assertEquals(4, tokens.size());
         assertEquals("the", tokens.get(0).getTerm());

@@ -226,7 +225,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         // Switch the analyzer out for just a tokenizer
         request.analyzer(null);
         request.tokenizer("standard");
-        analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
+        analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment);
         tokens = analyze.getTokens();
         assertEquals(4, tokens.size());
         assertEquals("the", tokens.get(0).getTerm());

@@ -236,7 +235,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {

         // Now try applying our token filter
         request.addTokenFilter("mock");
-        analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
+        analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment);
         tokens = analyze.getTokens();
         assertEquals(3, tokens.size());
         assertEquals("quick", tokens.get(0).getTerm());

@@ -250,7 +249,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
             new AnalyzeRequest()
                 .analyzer("custom_analyzer")
                 .text("the qu1ck brown fox-dog"),
-            AllFieldMapper.NAME, null, null, registry, environment));
+            "text", null, null, registry, environment));
         assertEquals(e.getMessage(), "failed to find global analyzer [custom_analyzer]");
     }

@@ -261,7 +260,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
             new AnalyzeRequest()
                 .analyzer("foobar")
                 .text("the qu1ck brown fox"),
-            AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
+            "text", null, notGlobal ? indexAnalyzers : null, registry, environment));
         if (notGlobal) {
             assertEquals(e.getMessage(), "failed to find analyzer [foobar]");
         } else {

@@ -273,7 +272,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
             new AnalyzeRequest()
                 .tokenizer("foobar")
                 .text("the qu1ck brown fox"),
-            AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
+            "text", null, notGlobal ? indexAnalyzers : null, registry, environment));
         if (notGlobal) {
             assertEquals(e.getMessage(), "failed to find tokenizer under [foobar]");
         } else {

@@ -286,7 +285,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
                 .tokenizer("whitespace")
                 .addTokenFilter("foobar")
                 .text("the qu1ck brown fox"),
-            AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
+            "text", null, notGlobal ? indexAnalyzers : null, registry, environment));
         if (notGlobal) {
             assertEquals(e.getMessage(), "failed to find token filter under [foobar]");
         } else {

@@ -300,7 +299,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
                 .addTokenFilter("lowercase")
                 .addCharFilter("foobar")
                 .text("the qu1ck brown fox"),
-            AllFieldMapper.NAME, null, notGlobal ? indexAnalyzers : null, registry, environment));
+            "text", null, notGlobal ? indexAnalyzers : null, registry, environment));
         if (notGlobal) {
             assertEquals(e.getMessage(), "failed to find char filter under [foobar]");
         } else {

@@ -312,7 +311,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
             new AnalyzeRequest()
                 .normalizer("foobar")
                 .text("the qu1ck brown fox"),
-            AllFieldMapper.NAME, null, indexAnalyzers, registry, environment));
+            "text", null, indexAnalyzers, registry, environment));
         assertEquals(e.getMessage(), "failed to find normalizer under [foobar]");
     }

@@ -321,7 +320,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         request.tokenizer("whitespace");
         request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters()
         request.text("the quick brown fox");
-        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
+        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment);
         List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();
         assertEquals(3, tokens.size());
         assertEquals("quick", tokens.get(0).getTerm());

@@ -333,7 +332,7 @@ public class TransportAnalyzeActionTests extends ESTestCase {
         AnalyzeRequest request = new AnalyzeRequest("index");
         request.normalizer("my_normalizer");
         request.text("ABc");
-        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, indexAnalyzers, registry, environment);
+        AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment);
         List<AnalyzeResponse.AnalyzeToken> tokens = analyze.getTokens();

         assertEquals(1, tokens.size());

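Every call site above now passes an explicit field name ("text") where AllFieldMapper.NAME used to stand in as the default. The minimal shape of such a call, mirroring the updated tests (registry and environment are the test fixtures):

    // Minimal shape of an analyze call with an explicit field name.
    AnalyzeRequest request = new AnalyzeRequest();
    request.text("the quick brown fox");
    request.analyzer("standard");
    AnalyzeResponse analyze =
        TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment);
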
@@ -47,9 +47,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.index.mapper.AllFieldMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.index.mapper.TextFieldMapper;
 import org.elasticsearch.index.mapper.TypeParsers;
 import org.elasticsearch.rest.action.document.RestTermVectorsAction;
 import org.elasticsearch.test.ESTestCase;

@@ -303,7 +303,7 @@ public class TermVectorsUnitTests extends ESTestCase {
         ft.setStoreTermVectorPositions(true);
         String ftOpts = FieldMapper.termVectorOptionsToString(ft);
         assertThat("with_positions_payloads", equalTo(ftOpts));
-        AllFieldMapper.Builder builder = new AllFieldMapper.Builder(null);
+        TextFieldMapper.Builder builder = new TextFieldMapper.Builder(null);
         boolean exceptiontrown = false;
         try {
             TypeParsers.parseTermVector("", ftOpts, builder);

@@ -1,279 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.common.lucene.all;
-
-import org.apache.lucene.document.StoredField;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
-import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.hamcrest.Matchers.equalTo;
-
-public class SimpleAllTests extends ESTestCase {
-    private FieldType getAllFieldType() {
-        FieldType ft = new FieldType();
-        ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
-        ft.setTokenized(true);
-        ft.freeze();
-        return ft;
-    }
-
-    private void assertExplanationScore(IndexSearcher searcher, Query query, ScoreDoc scoreDoc) throws IOException {
-        final Explanation expl = searcher.explain(query, scoreDoc.doc);
-        assertEquals(scoreDoc.score, expl.getValue(), 0.00001f);
-    }
-
-    public void testSimpleAllNoBoost() throws Exception {
-        FieldType allFt = getAllFieldType();
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-
-        Document doc = new Document();
-        doc.add(new Field("_id", "1", StoredField.TYPE));
-        doc.add(new AllField("_all", "something", 1.0f, allFt));
-        doc.add(new AllField("_all", "else", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        doc = new Document();
-        doc.add(new Field("_id", "2", StoredField.TYPE));
-        doc.add(new AllField("_all", "else", 1.0f, allFt));
-        doc.add(new AllField("_all", "something", 1.0f, allFt));
-        indexWriter.addDocument(doc);
-
-        IndexReader reader = DirectoryReader.open(indexWriter);
-        IndexSearcher searcher = new IndexSearcher(reader);
-
-        Query query = new AllTermQuery(new Term("_all", "else"));
-        TopDocs docs = searcher.search(query, 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertExplanationScore(searcher, query, docs.scoreDocs[0]);
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-        assertExplanationScore(searcher, query, docs.scoreDocs[1]);
-
-        query = new AllTermQuery(new Term("_all", "something"));
-        docs = searcher.search(query, 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertExplanationScore(searcher, query, docs.scoreDocs[0]);
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-        assertExplanationScore(searcher, query, docs.scoreDocs[1]);
-
-        indexWriter.close();
-    }
-
-    public void testSimpleAllWithBoost() throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-
-        FieldType allFt = getAllFieldType();
-        Document doc = new Document();
-        doc.add(new Field("_id", "1", StoredField.TYPE));
-        doc.add(new AllField("_all", "something", 1.0f, allFt));
-        doc.add(new AllField("_all", "else", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        doc = new Document();
-        doc.add(new Field("_id", "2", StoredField.TYPE));
-        doc.add(new AllField("_all", "else", 2.0f, allFt));
-        doc.add(new AllField("_all", "something", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        IndexReader reader = DirectoryReader.open(indexWriter);
-        IndexSearcher searcher = new IndexSearcher(reader);
-
-        // this one is boosted. so the second doc is more relevant
-        Query query = new AllTermQuery(new Term("_all", "else"));
-        TopDocs docs = searcher.search(query, 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(1));
-        assertExplanationScore(searcher, query, docs.scoreDocs[0]);
-        assertThat(docs.scoreDocs[1].doc, equalTo(0));
-        assertExplanationScore(searcher, query, docs.scoreDocs[1]);
-
-        query = new AllTermQuery(new Term("_all", "something"));
-        docs = searcher.search(query, 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertExplanationScore(searcher, query, docs.scoreDocs[0]);
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-        assertExplanationScore(searcher, query, docs.scoreDocs[1]);
-
-        indexWriter.close();
-    }
-
-    public void testTermMissingFromOneSegment() throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-
-        FieldType allFt = getAllFieldType();
-        Document doc = new Document();
-        doc.add(new Field("_id", "1", StoredField.TYPE));
-        doc.add(new AllField("_all", "something", 2.0f, allFt));
-
-        indexWriter.addDocument(doc);
-        indexWriter.commit();
-
-        doc = new Document();
-        doc.add(new Field("_id", "2", StoredField.TYPE));
-        doc.add(new AllField("_all", "else", 1.0f, allFt));
-        indexWriter.addDocument(doc);
-
-        IndexReader reader = DirectoryReader.open(indexWriter);
-        assertEquals(2, reader.leaves().size());
-        IndexSearcher searcher = new IndexSearcher(reader);
-
-        // "something" only appears in the first segment:
-        Query query = new AllTermQuery(new Term("_all", "something"));
-        TopDocs docs = searcher.search(query, 10);
-        assertEquals(1, docs.totalHits);
-
-        indexWriter.close();
-    }
-
-    public void testMultipleTokensAllNoBoost() throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-
-        FieldType allFt = getAllFieldType();
-        Document doc = new Document();
-        doc.add(new Field("_id", "1", StoredField.TYPE));
-        doc.add(new AllField("_all", "something moo", 1.0f, allFt));
-        doc.add(new AllField("_all", "else koo", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        doc = new Document();
-        doc.add(new Field("_id", "2", StoredField.TYPE));
-        doc.add(new AllField("_all", "else koo", 1.0f, allFt));
-        doc.add(new AllField("_all", "something moo", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        IndexReader reader = DirectoryReader.open(indexWriter);
-        IndexSearcher searcher = new IndexSearcher(reader);
-
-        TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-
-        docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-
-        docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-
-        docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-
-        indexWriter.close();
-    }
-
-    public void testMultipleTokensAllWithBoost() throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
-
-        FieldType allFt = getAllFieldType();
-        Document doc = new Document();
-        doc.add(new Field("_id", "1", StoredField.TYPE));
-        doc.add(new AllField("_all", "something moo", 1.0f, allFt));
-        doc.add(new AllField("_all", "else koo", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        doc = new Document();
-        doc.add(new Field("_id", "2", StoredField.TYPE));
-        doc.add(new AllField("_all", "else koo", 2.0f, allFt));
-        doc.add(new AllField("_all", "something moo", 1.0f, allFt));
-
-        indexWriter.addDocument(doc);
-
-        IndexReader reader = DirectoryReader.open(indexWriter);
-        IndexSearcher searcher = new IndexSearcher(reader);
-
-        TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(1));
-        assertThat(docs.scoreDocs[1].doc, equalTo(0));
-
-        docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(1));
-        assertThat(docs.scoreDocs[1].doc, equalTo(0));
-
-        docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-
-        docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
-        assertThat(docs.totalHits, equalTo(2L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-        assertThat(docs.scoreDocs[1].doc, equalTo(1));
-
-        indexWriter.close();
-    }
-
-    public void testNoTokens() throws Exception {
-        Directory dir = new RAMDirectory();
-        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.KEYWORD_ANALYZER));
-
-        FieldType allFt = getAllFieldType();
-        Document doc = new Document();
-        doc.add(new Field("_id", "1", StoredField.TYPE));
-        doc.add(new AllField("_all", "", 2.0f, allFt));
-        indexWriter.addDocument(doc);
-
-        IndexReader reader = DirectoryReader.open(indexWriter);
-        IndexSearcher searcher = new IndexSearcher(reader);
-
-        TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
-        assertThat(docs.totalHits, equalTo(1L));
-        assertThat(docs.scoreDocs[0].doc, equalTo(0));
-    }
-}

@@ -23,11 +23,9 @@ import org.apache.logging.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.queryparser.classic.ParseException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.lucene.all.AllTokenStream;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;

@@ -126,7 +124,7 @@ public class SynonymsAnalysisTests extends ESTestCase {
     private void match(String analyzerName, String source, String target) throws IOException {
         Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();

-        TokenStream stream = AllTokenStream.allTokenStream("_all", source, 1.0f, analyzer);
+        TokenStream stream = analyzer.tokenStream("", source);
         stream.reset();
         CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);

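With AllTokenStream gone, the helper reads terms straight off the analyzer's TokenStream. The standard Lucene consumption loop it now relies on, for reference (a sketch; the wrapper class and method are illustrative):

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    final class TokenStreamSketch {
        static void printTerms(Analyzer analyzer, String source) throws IOException {
            try (TokenStream stream = analyzer.tokenStream("", source)) {
                CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
                stream.reset();
                while (stream.incrementToken()) {
                    System.out.println(termAtt.toString()); // next analyzed term
                }
                stream.end();
            }
        }
    }
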
@@ -1,109 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-
-import java.util.Arrays;
-import java.util.Collection;
-
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-
-public class AllFieldIT extends ESIntegTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
-    }
-
-    public void test5xIndicesContinueToUseAll() throws Exception {
-        // Default 5.x settings
-        assertAcked(prepareCreate("test").setSettings("index.version.created", Version.V_5_1_1.id));
-        client().prepareIndex("test", "type", "1").setSource("body", "foo").get();
-        refresh();
-        SearchResponse resp = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("_all", "foo")).get();
-        assertHitCount(resp, 1);
-        assertSearchHits(resp, "1");
-
-        // _all explicitly enabled
-        assertAcked(prepareCreate("test2")
-            .setSource(jsonBuilder()
-                .startObject()
-                    .startObject("mappings")
-                        .startObject("type")
-                            .startObject("_all")
-                                .field("enabled", true)
-                            .endObject() // _all
-                        .endObject() // type
-                    .endObject() // mappings
-                .endObject())
-            .setSettings("index.version.created", Version.V_5_4_0_ID));
-        client().prepareIndex("test2", "type", "1").setSource("foo", "bar").get();
-        refresh();
-        resp = client().prepareSearch("test2").setQuery(QueryBuilders.matchQuery("_all", "bar")).get();
-        assertHitCount(resp, 1);
-        assertSearchHits(resp, "1");
-
-        // _all explicitly disabled
-        assertAcked(prepareCreate("test3")
-            .setSource(jsonBuilder()
-                .startObject()
-                    .startObject("mappings")
-                        .startObject("type")
-                            .startObject("_all")
-                                .field("enabled", false)
-                            .endObject() // _all
-                        .endObject() // type
-                    .endObject() // mappings
-                .endObject())
-            .setSettings("index.version.created", Version.V_5_4_0_ID));
-        client().prepareIndex("test3", "type", "1").setSource("foo", "baz").get();
-        refresh();
-        resp = client().prepareSearch("test3").setQuery(QueryBuilders.matchQuery("_all", "baz")).get();
-        assertHitCount(resp, 0);
-
-        // _all present, but not enabled or disabled (default settings)
-        assertAcked(prepareCreate("test4")
-            .setSource(jsonBuilder()
-                .startObject()
-                    .startObject("mappings")
-                        .startObject("type")
-                            .startObject("_all")
-                            .endObject() // _all
-                        .endObject() // type
-                    .endObject() // mappings
-                .endObject())
-            .setSettings("index.version.created", Version.V_5_4_0_ID));
-        client().prepareIndex("test4", "type", "1").setSource("foo", "eggplant").get();
-        refresh();
-        resp = client().prepareSearch("test4").setQuery(QueryBuilders.matchQuery("_all", "eggplant")).get();
-        assertHitCount(resp, 1);
-        assertSearchHits(resp, "1");
-    }
-
-}

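These integration tests only exercised 5.x indices, where _all could still exist. On indices without _all, the closest equivalent is a query_string query, which expands across all fields by default; a sketch (not part of this commit, client() as provided by ESIntegTestCase):

    SearchResponse resp = client().prepareSearch("test")
        .setQuery(QueryBuilders.queryStringQuery("foo")) // default_field resolves to "*"
        .get();
    assertHitCount(resp, 1);
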
@@ -1,29 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.mapper;
-
-import org.elasticsearch.index.mapper.AllFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
-
-public class AllFieldTypeTests extends FieldTypeTestCase {
-    @Override
-    protected MappedFieldType createDefaultFieldType() {
-        return new AllFieldMapper.AllFieldType();
-    }
-}

@@ -262,7 +262,6 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase {
         // a whole lot of bogus settings right now it picks up from calling super.doXContentBody...
         assertTrue(got, got.contains("\"null_value\":null"));
         assertTrue(got, got.contains("\"ignore_malformed\":false"));
-        assertTrue(got, got.contains("\"include_in_all\":false"));
     }

     public void testEmptyName() throws IOException {

@@ -245,21 +245,6 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
         assertNotSame(indexService.mapperService().documentMapper("type1"), documentMapper);
     }

-    public void testAllEnabled() throws Exception {
-        IndexService indexService = createIndex("test");
-        assertFalse(indexService.mapperService().allEnabled());
-
-        CompressedXContent enabledAll = new CompressedXContent(XContentFactory.jsonBuilder().startObject()
-            .startObject("_all")
-                .field("enabled", true)
-            .endObject().endObject().bytes());
-
-        Exception e = expectThrows(MapperParsingException.class,
-            () -> indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, enabledAll,
-                MergeReason.MAPPING_UPDATE, random().nextBoolean()));
-        assertThat(e.getMessage(), containsString("[_all] is disabled in 6.0"));
-    }
-
     public void testPartitionedConstraints() {
         // partitioned index must have routing
         IllegalArgumentException noRoutingException = expectThrows(IllegalArgumentException.class, () -> {

@@ -45,37 +45,4 @@ public class MapperTests extends ESTestCase {
         NullPointerException e = expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1)));
     }

-    public void testExceptionForIncludeInAll() throws IOException {
-        XContentBuilder mapping = createMappingWithIncludeInAll();
-        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
-
-        final MapperService currentMapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings, "test");
-        Exception e = expectThrows(MapperParsingException.class, () ->
-            currentMapperService.parse("type", new CompressedXContent(mapping.string()), true));
-        assertEquals("[include_in_all] is not allowed for indices created on or after version 6.0.0 as [_all] is deprecated. " +
-                "As a replacement, you can use an [copy_to] on mapping fields to create your own catch all field.",
-            e.getMessage());
-
-        settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_3_0).build();
-
-        // Create the mapping service with an older index creation version
-        final MapperService oldMapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings, "test");
-        // Should not throw an exception now
-        oldMapperService.parse("type", new CompressedXContent(mapping.string()), true);
-    }
-
-    private static XContentBuilder createMappingWithIncludeInAll() throws IOException {
-        return jsonBuilder()
-            .startObject()
-                .startObject("type")
-                    .startObject("properties")
-                        .startObject("a")
-                            .field("type", "text")
-                            .field("include_in_all", randomBoolean())
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject();
-    }
-
 }

@@ -1,66 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.mapper;
-
-import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.MapperTestUtils;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-
-
-public class MultiFieldIncludeInAllMapperTests extends ESTestCase {
-    public void testExceptionForIncludeInAllInMultiFields() throws IOException {
-        XContentBuilder mapping = createMappingWithIncludeInAllInMultiField();
-
-        // first check that for newer versions we throw exception if include_in_all is found withing multi field
-        MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "test");
-        Exception e = expectThrows(MapperParsingException.class, () ->
-            mapperService.parse("type", new CompressedXContent(mapping.string()), true));
-        assertEquals("include_in_all in multi fields is not allowed. Found the include_in_all in field [c] which is within a multi field.",
-            e.getMessage());
-    }
-
-    private static XContentBuilder createMappingWithIncludeInAllInMultiField() throws IOException {
-        XContentBuilder mapping = jsonBuilder();
-        mapping.startObject()
-            .startObject("type")
-                .startObject("properties")
-                    .startObject("a")
-                        .field("type", "text")
-                    .endObject()
-                    .startObject("b")
-                        .field("type", "text")
-                        .startObject("fields")
-                            .startObject("c")
-                                .field("type", "text")
-                                .field("include_in_all", false)
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-        .endObject();
-        return mapping;
-    }
-}
@@ -182,15 +182,12 @@ public class ObjectMapperTests extends ESSingleNodeTestCase {
             .endObject().endObject().string();
         MapperService mapperService = createIndex("test").mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false);
-        assertNull(mapper.root().includeInAll());
         assertNull(mapper.root().dynamic());
         String update = XContentFactory.jsonBuilder().startObject()
             .startObject("type")
-                .field("include_in_all", false)
                 .field("dynamic", "strict")
             .endObject().endObject().string();
         mapper = mapperService.merge("type", new CompressedXContent(update), MergeReason.MAPPING_UPDATE, false);
-        assertFalse(mapper.root().includeInAll());
         assertEquals(Dynamic.STRICT, mapper.root().dynamic());
     }
 
@@ -61,44 +61,6 @@ public class UpdateMappingOnClusterIT extends ESIntegTestCase {
 
     }
 
-    public void testUpdatingAllSettingsOnOlderIndex() throws Exception {
-        XContentBuilder mapping = jsonBuilder()
-            .startObject()
-                .startObject("mappings")
-                    .startObject(TYPE)
-                        .startObject("_all").field("enabled", "true").endObject()
-                    .endObject()
-                .endObject()
-            .endObject();
-        XContentBuilder mappingUpdate = jsonBuilder()
-            .startObject()
-                .startObject("_all").field("enabled", "false").endObject()
-                .startObject("properties").startObject("text").field("type", "text").endObject()
-            .endObject()
-            .endObject();
-        String errorMessage = "[_all] enabled is true now encountering false";
-        testConflict(mapping.string(), mappingUpdate.string(), Version.V_5_0_0, errorMessage);
-    }
-
-    public void testUpdatingAllSettingsOnOlderIndexDisabledToEnabled() throws Exception {
-        XContentBuilder mapping = jsonBuilder()
-            .startObject()
-                .startObject("mappings")
-                    .startObject(TYPE)
-                        .startObject("_all").field("enabled", "false").endObject()
-                    .endObject()
-                .endObject()
-            .endObject();
-        XContentBuilder mappingUpdate = jsonBuilder()
-            .startObject()
-                .startObject("_all").field("enabled", "true").endObject()
-                .startObject("properties").startObject("text").field("type", "text").endObject()
-            .endObject()
-            .endObject();
-        String errorMessage = "[_all] enabled is false now encountering true";
-        testConflict(mapping.string(), mappingUpdate.string(), Version.V_5_0_0, errorMessage);
-    }
-
     private void compareMappingOnNodes(GetMappingsResponse previousMapping) {
         // make sure all nodes have same cluster state
         for (Client client : cluster().getClients()) {
@@ -33,7 +33,6 @@ import org.apache.lucene.search.PointRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.index.query.MultiMatchQueryBuilder.Type;
 import org.elasticsearch.index.search.MatchQuery;
@@ -144,7 +143,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
     @Override
     protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
         // we rely on integration tests for deeper checks here
-        assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
+        assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(TermQuery.class))
             .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
             .or(instanceOf(FuzzyQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
             .or(instanceOf(MatchAllDocsQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
@@ -50,7 +50,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.Fuzziness;
@@ -172,7 +171,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
     @Override
     protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder,
                                        Query query, SearchContext context) throws IOException {
-        assertThat(query, either(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class))
+        assertThat(query, either(instanceOf(TermQuery.class))
             .or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
             .or(instanceOf(PhraseQuery.class)).or(instanceOf(BoostQuery.class))
             .or(instanceOf(MultiPhrasePrefixQuery.class)).or(instanceOf(PrefixQuery.class)).or(instanceOf(SpanQuery.class))
@@ -811,11 +810,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
         }
         assertThat(query, equalTo(expected));
 
-        queryBuilder = new QueryStringQueryBuilder("_all:*");
-        query = queryBuilder.toQuery(context);
-        expected = new MatchAllDocsQuery();
-        assertThat(query, equalTo(expected));
-
         queryBuilder = new QueryStringQueryBuilder("*:*");
         query = queryBuilder.toQuery(context);
         expected = new MatchAllDocsQuery();
@@ -40,7 +40,6 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.lucene.all.AllTermQuery;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.search.SimpleQueryStringQueryParser;
 import org.elasticsearch.search.internal.SearchContext;
@@ -269,7 +268,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
             }
         } else if (queryBuilder.fields().size() == 0) {
             assertThat(query, either(instanceOf(DisjunctionMaxQuery.class))
-                .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(TermQuery.class)).or(instanceOf(AllTermQuery.class)));
+                .or(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(TermQuery.class)));
             if (query instanceof DisjunctionMaxQuery) {
                 for (Query disjunct : (DisjunctionMaxQuery) query) {
                     assertThat(disjunct, either(instanceOf(TermQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
@@ -176,16 +176,6 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
         assertEquals(expected, actual);
     }
 
-    public void testMultiMatchPrefixWithAllField() throws IOException {
-        QueryShardContext queryShardContext = indexService.newQueryShardContext(
-            randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null);
-        queryShardContext.setAllowUnmappedFields(true);
-        Query parsedQuery =
-            multiMatchQuery("foo").field("_all").type(MultiMatchQueryBuilder.Type.PHRASE_PREFIX).toQuery(queryShardContext);
-        assertThat(parsedQuery, instanceOf(MultiPhrasePrefixQuery.class));
-        assertThat(parsedQuery.toString(), equalTo("_all:\"foo*\""));
-    }
-
     public void testMultiMatchCrossFieldsWithSynonyms() throws IOException {
         QueryShardContext queryShardContext = indexService.newQueryShardContext(
             randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null);
@@ -61,7 +61,7 @@ import static org.hamcrest.Matchers.nullValue;
 
 public class SearchHitTests extends ESTestCase {
 
-    private static Set<String> META_FIELDS = Sets.newHashSet("_uid", "_all", "_parent", "_routing", "_size", "_timestamp", "_ttl");
+    private static Set<String> META_FIELDS = Sets.newHashSet("_uid", "_parent", "_routing", "_size", "_timestamp", "_ttl");
 
     public static SearchHit createTestItem(boolean withOptionalInnerHits) {
         int internalId = randomInt();
@@ -208,37 +208,24 @@ public class QueryStringIT extends ESIntegTestCase {
     }
 
     public void testAllFields() throws Exception {
-        String indexBodyWithAll = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index-with-all.json");
         String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json");
 
-        // Defaults to index.query.default_field=_all
-        prepareCreate("test_1").setSource(indexBodyWithAll, XContentType.JSON).get();
         Settings.Builder settings = Settings.builder().put("index.query.default_field", "*");
-        prepareCreate("test_2").setSource(indexBody, XContentType.JSON).setSettings(settings).get();
-        ensureGreen("test_1","test_2");
+        prepareCreate("test_1").setSource(indexBody, XContentType.JSON).setSettings(settings).get();
+        ensureGreen("test_1");
 
         List<IndexRequestBuilder> reqs = new ArrayList<>();
         reqs.add(client().prepareIndex("test_1", "doc", "1").setSource("f1", "foo", "f2", "eggplant"));
-        reqs.add(client().prepareIndex("test_2", "doc", "1").setSource("f1", "foo", "f2", "eggplant"));
         indexRandom(true, false, reqs);
 
         SearchResponse resp = client().prepareSearch("test_1").setQuery(
             queryStringQuery("foo eggplant").defaultOperator(Operator.AND)).get();
         assertHitCount(resp, 0L);
 
-        resp = client().prepareSearch("test_2").setQuery(
-            queryStringQuery("foo eggplant").defaultOperator(Operator.AND)).get();
-        assertHitCount(resp, 0L);
-
         resp = client().prepareSearch("test_1").setQuery(
             queryStringQuery("foo eggplant").defaultOperator(Operator.OR)).get();
         assertHits(resp.getHits(), "1");
         assertHitCount(resp, 1L);
-
-        resp = client().prepareSearch("test_2").setQuery(
-            queryStringQuery("foo eggplant").defaultOperator(Operator.OR)).get();
-        assertHits(resp.getHits(), "1");
-        assertHitCount(resp, 1L);
     }
 
 
@@ -528,28 +528,6 @@ public class SimpleQueryStringIT extends ESIntegTestCase {
         assertHitCount(resp, 2L);
     }
 
-    public void testExplicitAllFieldsRequested() throws Exception {
-        String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index-with-all.json");
-        prepareCreate("test")
-                .setSource(indexBody, XContentType.JSON)
-                // .setSettings(Settings.builder().put("index.version.created", Version.V_5_0_0.id)).get();
-                .get();
-        ensureGreen("test");
-
-        List<IndexRequestBuilder> reqs = new ArrayList<>();
-        reqs.add(client().prepareIndex("test", "doc", "1").setSource("f1", "foo", "f2", "eggplant"));
-        indexRandom(true, false, reqs);
-
-        SearchResponse resp = client().prepareSearch("test").setQuery(
-                simpleQueryStringQuery("foo eggplant").defaultOperator(Operator.AND)).get();
-        assertHitCount(resp, 0L);
-
-        resp = client().prepareSearch("test").setQuery(
-                simpleQueryStringQuery("foo eggplant").defaultOperator(Operator.AND).useAllFields(true)).get();
-        assertHits(resp.getHits(), "1");
-        assertHitCount(resp, 1L);
-    }
-
     public void testAllFieldsWithSpecifiedLeniency() throws IOException {
         String indexBody = copyToStringFromClasspath("/org/elasticsearch/search/query/all-query-index.json");
         prepareCreate("test").setSource(indexBody, XContentType.JSON).get();
@@ -1,23 +0,0 @@
-{
-  "settings": {
-    "index": {
-      "number_of_shards": 1,
-      "number_of_replicas": 0,
-      "version": {
-        "created": "5000099"
-      },
-      "query.default_field": "f1"
-    }
-  },
-  "mappings": {
-    "doc": {
-      "_all": {
-        "enabled": true
-      },
-      "properties": {
-        "f1": {"type": "text"},
-        "f2": {"type": "text"}
-      }
-    }
-  }
-}
@@ -142,13 +142,6 @@ Larger shards are going to be more efficient at storing data. To increase the si
 
 Keep in mind that large shard sizes come with drawbacks, such as long full recovery times.
 
-[float]
-=== Disable `_all`
-
-The <<mapping-all-field,`_all`>> field indexes the value of all fields of a
-document and can use significant space. If you never need to search against all
-fields at the same time, it can be disabled.
-
 [float]
 === Disable `_source`
 
@@ -9,8 +9,6 @@ are stored and indexed. For instance, use mappings to define:
 
 * which string fields should be treated as full text fields.
 * which fields contain numbers, dates, or geolocations.
-* whether the values of all fields in the document should be
-  indexed into the catch-all <<mapping-all-field,`_all`>> field.
 * the <<mapping-date-format,format>> of date values.
 * custom rules to control the mapping for
   <<dynamic-mapping,dynamically added fields>>.
@@ -40,10 +40,6 @@ can be customised when a mapping type is created.
 [float]
 === Indexing meta-fields
 
-<<mapping-all-field,`_all`>>::
-
-A _catch-all_ field that indexes the values of all other fields. Disabled by default.
-
 <<mapping-field-names-field,`_field_names`>>::
 
 All fields in the document which contain non-null values.
@@ -63,8 +59,6 @@ can be customised when a mapping type is created.
 Application specific metadata.
 
 
-include::fields/all-field.asciidoc[]
-
 include::fields/field-names-field.asciidoc[]
 
 include::fields/id-field.asciidoc[]
@@ -1,360 +0,0 @@
-[[mapping-all-field]]
-=== `_all` field
-
-deprecated[6.0.0, `_all` may no longer be enabled for indices created in 6.0+, use a custom field and the mapping `copy_to` parameter]
-
-The `_all` field is a special _catch-all_ field which concatenates the values
-of all of the other fields into one big string, using space as a delimiter, which is then
-<<analysis,analyzed>> and indexed, but not stored. This means that it can be
-searched, but not retrieved.
-
-The `_all` field allows you to search for values in documents without knowing
-which field contains the value. This makes it a useful option when getting
-started with a new dataset. For instance:
-
-[source,js]
---------------------------------
-PUT /my_index
-{
-  "mapping": {
-    "user": {
-      "_all": {
-        "enabled": true <1>
-      }
-    }
-  }
-}
-
-PUT /my_index/user/1 <2>
-{
-  "first_name": "John",
-  "last_name": "Smith",
-  "date_of_birth": "1970-10-24"
-}
-
-GET /my_index/_search
-{
-  "query": {
-    "match": {
-      "_all": "john smith 1970"
-    }
-  }
-}
---------------------------------
-// TEST[skip:_all is no longer allowed]
-// CONSOLE
-<1> Enabling the `_all` field
-<2> The `_all` field will contain the terms: [ `"john"`, `"smith"`, `"1970"`, `"10"`, `"24"` ]
-
-[NOTE]
-.All values treated as strings
-=============================================================================
-
-The `date_of_birth` field in the above example is recognised as a `date` field
-and so will index a single term representing `1970-10-24 00:00:00 UTC`. The
-`_all` field, however, treats all values as strings, so the date value is
-indexed as the three string terms: `"1970"`, `"24"`, `"10"`.
-
-It is important to note that the `_all` field combines the original values
-from each field as a string. It does not combine the _terms_ from each field.
-
-=============================================================================
-
-The `_all` field is just a <<text,`text`>> field, and accepts the same
-parameters that other string fields accept, including `analyzer`,
-`term_vectors`, `index_options`, and `store`.
-
-The `_all` field can be useful, especially when exploring new data using
-simple filtering. However, by concatenating field values into one big string,
-the `_all` field loses the distinction between short fields (more relevant)
-and long fields (less relevant). For use cases where search relevance is
-important, it is better to query individual fields specifically.
-
-The `_all` field is not free: it requires extra CPU cycles and uses more disk
-space. For this reason, it is disabled by default. If needed, it can be
-<<enabling-all-field,enabled>>.
-
-[[querying-all-field]]
-==== Using the `_all` field in queries
-
-The <<query-dsl-query-string-query,`query_string`>> and
-<<query-dsl-simple-query-string-query,`simple_query_string`>> queries query the
-`_all` field by default if it is enabled, unless another field is specified:
-
-[source,js]
---------------------------------
-GET _search
-{
-  "query": {
-    "query_string": {
-      "query": "john smith new york"
-    }
-  }
-}
---------------------------------
-// CONSOLE
-
-The same goes for the `?q=` parameter in <<search-uri-request, URI search
-requests>> (which is rewritten to a `query_string` query internally):
-
-[source,js]
---------------------------------
-GET _search?q=john+smith+new+york
---------------------------------
-// TEST[skip:_all is no longer allowed]
-// CONSOLE
-
-Other queries, such as the <<query-dsl-match-query,`match`>> and
-<<query-dsl-term-query,`term`>> queries require you to specify the `_all` field
-explicitly, as per the <<mapping-all-field,first example>>.
-
-[[enabling-all-field]]
-==== Enabling the `_all` field
-
-The `_all` field can be enabled per-type by setting `enabled` to `true`:
-
-[source,js]
---------------------------------
-PUT my_index
-{
-  "mappings": {
-    "type_1": { <1>
-      "properties": {...}
-    },
-    "type_2": { <2>
-      "_all": {
-        "enabled": true
-      },
-      "properties": {...}
-    }
-  }
-}
---------------------------------
-// TEST[s/\.\.\.//]
-// TEST[skip:_all is no longer allowed]
-// CONSOLE
-
-<1> The `_all` field in `type_1` is disabled.
-<2> The `_all` field in `type_2` is enabled.
-
-If the `_all` field is enabled, then URI search requests and the `query_string`
-and `simple_query_string` queries can automatically use it for queries (see
-<<querying-all-field>>). You can configure them to use a different field with
-the `index.query.default_field` setting:
-
-[source,js]
---------------------------------
-PUT my_index
-{
-  "mappings": {
-    "my_type": {
-      "properties": {
-        "content": {
-          "type": "text"
-        }
-      }
-    }
-  },
-  "settings": {
-    "index.query.default_field": "content" <1>
-  }
-}
---------------------------------
-// CONSOLE
-
-<1> The `query_string` query will default to querying the `content` field in this index.
-
-[[all-field-and-boosting]]
-==== Index boosting and the `_all` field
-
-Individual fields can be _boosted_ at index time, with the <<mapping-boost,`boost`>>
-parameter. The `_all` field takes these boosts into account:
-
-[source,js]
---------------------------------
-PUT myindex
-{
-  "mappings": {
-    "mytype": {
-      "_all": {"enabled": true},
-      "properties": {
-        "title": { <1>
-          "type": "text",
-          "boost": 2
-        },
-        "content": { <1>
-          "type": "text"
-        }
-      }
-    }
-  }
-}
---------------------------------
-// TEST[skip:_all is no longer allowed]
-// CONSOLE
-
-<1> When querying the `_all` field, words that originated in the
-    `title` field are twice as relevant as words that originated in
-    the `content` field.
-
-WARNING: Using index-time boosting with the `_all` field has a significant
-impact on query performance. Usually the better solution is to query fields
-individually, with optional query time boosting.
-
-
-[[custom-all-fields]]
-==== Custom `_all` fields
-
-While there is only a single `_all` field per index, the <<copy-to,`copy_to`>>
-parameter allows the creation of multiple __custom `_all` fields__. For
-instance, `first_name` and `last_name` fields can be combined together into
-the `full_name` field:
-
-[source,js]
---------------------------------
-PUT myindex
-{
-  "mappings": {
-    "mytype": {
-      "properties": {
-        "first_name": {
-          "type": "text",
-          "copy_to": "full_name" <1>
-        },
-        "last_name": {
-          "type": "text",
-          "copy_to": "full_name" <1>
-        },
-        "full_name": {
-          "type": "text"
-        }
-      }
-    }
-  }
-}
-
-PUT myindex/mytype/1
-{
-  "first_name": "John",
-  "last_name": "Smith"
-}
-
-GET myindex/_search
-{
-  "query": {
-    "match": {
-      "full_name": "John Smith"
-    }
-  }
-}
---------------------------------
-// CONSOLE
-
-<1> The `first_name` and `last_name` values are copied to the `full_name` field.
-
-[[highlighting-all-field]]
-==== Highlighting and the `_all` field
-
-A field can only be used for <<search-request-highlighting,highlighting>> if
-the original string value is available, either from the
-<<mapping-source-field,`_source`>> field or as a stored field.
-
-The `_all` field is not present in the `_source` field and it is not stored or
-enabled by default, and so cannot be highlighted. There are two options. Either
-<<all-field-store,store the `_all` field>> or highlight the
-<<all-highlight-fields,original fields>>.
-
-[[all-field-store]]
-===== Store the `_all` field
-
-If `store` is set to `true`, then the original field value is retrievable and
-can be highlighted:
-
-[source,js]
---------------------------------
-PUT myindex
-{
-  "mappings": {
-    "mytype": {
-      "_all": {
-        "enabled": true,
-        "store": true
-      }
-    }
-  }
-}
-
-PUT myindex/mytype/1
-{
-  "first_name": "John",
-  "last_name": "Smith"
-}
-
-GET _search
-{
-  "query": {
-    "match": {
-      "_all": "John Smith"
-    }
-  },
-  "highlight": {
-    "fields": {
-      "_all": {}
-    }
-  }
-}
---------------------------------
-// TEST[skip:_all is no longer allowed]
-// CONSOLE
-
-Of course, enabling and storing the `_all` field will use significantly more
-disk space and, because it is a combination of other fields, it may result in
-odd highlighting results.
-
-The `_all` field also accepts the `term_vector` and `index_options`
-parameters, allowing highlighting to use it.
-
-[[all-highlight-fields]]
-===== Highlight original fields
-
-You can query the `_all` field, but use the original fields for highlighting as follows:
-
-[source,js]
---------------------------------
-PUT myindex
-{
-  "mappings": {
-    "mytype": {
-      "_all": {"enabled": true}
-    }
-  }
-}
-
-PUT myindex/mytype/1
-{
-  "first_name": "John",
-  "last_name": "Smith"
-}
-
-GET _search
-{
-  "query": {
-    "match": {
-      "_all": "John Smith" <1>
-    }
-  },
-  "highlight": {
-    "fields": {
-      "*_name": { <2>
-        "require_field_match": false <3>
-      }
-    }
-  }
-}
---------------------------------
-// TEST[skip:_all is no longer allowed]
-// CONSOLE
-
-<1> The query inspects the `_all` field to find matching documents.
-<2> Highlighting is performed on the two name fields, which are available from the `_source`.
-<3> The query wasn't run against the name fields, so set `require_field_match` to `false`.
@@ -64,8 +64,6 @@ simple queries to filter the dataset by date or tags, and the results are
 returned as aggregations.
 
 In this case, disabling the `_source` field will save space and reduce I/O.
-It is also advisable to disable the <<mapping-all-field,`_all` field>> in the
-metrics case.
 
 **************************************************
 
@@ -66,13 +66,6 @@ POST _search
 // CONSOLE
 
 
-The boost is also applied when it is copied with the
-value in the <<mapping-all-field,`_all`>> field. This means that, when
-querying the `_all` field, words that originated from the `title` field will
-have a higher score than words that originated in the `content` field.
-This functionality comes at a cost: queries on the `_all` field are slower
-when field boosting is used.
-
 deprecated[5.0.0, index time boost is deprecated. Instead, the field mapping boost is applied at query time. For indices created before 5.0.0 the boost will still be applied at index time.]
 [WARNING]
 .Why index time boosting is a bad idea
@@ -1,9 +1,8 @@
 [[copy-to]]
 === `copy_to`
 
-The `copy_to` parameter allows you to create custom
-<<mapping-all-field,`_all`>> fields. In other words, the values of multiple
-fields can be copied into a group field, which can then be queried as a single
+The `copy_to` parameter allows you to copy the values of multiple
+fields into a group field, which can then be queried as a single
 field. For instance, the `first_name` and `last_name` fields can be copied to
 the `full_name` field as follows:
 
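The mapping example that this paragraph introduces is unchanged by the commit, so it does not appear in the hunk; for reference, a minimal sketch of such a `copy_to` mapping (index and type names purely illustrative):

[source,js]
--------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "first_name": {
          "type": "text",
          "copy_to": "full_name" <1>
        },
        "last_name": {
          "type": "text",
          "copy_to": "full_name" <1>
        },
        "full_name": {
          "type": "text"
        }
      }
    }
  }
}
--------------------------------
<1> The values of `first_name` and `last_name` are copied into `full_name` at index time, so `full_name` can be queried like any single field.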
@@ -24,10 +24,12 @@ way to reindex old indices is to use the `reindex` API.
 [float]
 === Also see:
 
-* <<breaking_70_aggregations_changes>>
 * <<breaking_70_cluster_changes>>
 * <<breaking_70_indices_changes>>
+* <<breaking_70_aggregations_changes>>
+* <<breaking_70_mappings_changes>>
 
-include::migrate_7_0/aggregations.asciidoc[]
 include::migrate_7_0/cluster.asciidoc[]
 include::migrate_7_0/indices.asciidoc[]
+include::migrate_7_0/aggregations.asciidoc[]
+include::migrate_7_0/mappings.asciidoc[]
@@ -0,0 +1,6 @@
+[[breaking_70_mappings_changes]]
+=== Mapping changes
+
+==== The `_all` meta field is removed
+
+The `_all` field, deprecated in 6, has now been removed.
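As the removed `_all` documentation suggests, the replacement for a catch-all field is the mapping `copy_to` parameter; a minimal sketch of the migration (index, type, and field names are illustrative, not taken from the commit):

[source,js]
--------------------------------
PUT my_index
{
  "mappings": {
    "doc": {
      "properties": {
        "f1": { "type": "text", "copy_to": "catch_all" },
        "f2": { "type": "text", "copy_to": "catch_all" },
        "catch_all": { "type": "text" }
      }
    }
  }
}

GET my_index/_search
{
  "query": {
    "match": {
      "catch_all": "foo"
    }
  }
}
--------------------------------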
@@ -181,8 +181,7 @@ for documents `like: "Apple"`, but `unlike: "cake crumble tree"`. The syntax
 is the same as `like`.
 
 `fields`::
-A list of fields to fetch and analyze the text from. Defaults to the `_all`
-field for free text and to all possible fields for document inputs.
+A list of fields to fetch and analyze the text from.
 
 `like_text`::
 The text to find documents like it.
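With no `_all` default to fall back on, callers name the fields explicitly; a sketch of a `more_like_this` query doing so (field names illustrative):

[source,js]
--------------------------------
GET /_search
{
  "query": {
    "more_like_this": {
      "fields": ["title", "description"],
      "like": "Apple",
      "unlike": "cake crumble tree"
    }
  }
}
--------------------------------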
@@ -115,9 +115,7 @@ Defaults to `true`.
 
 |`all_fields` | deprecated[6.0.0, set `default_field` to `*` instead]
 Perform the query on all fields detected in the mapping that can
-be queried. Will be used by default when the `_all` field is disabled and no
-`default_field` is specified (either in the index settings or in the request
-body) and no `fields` are specified.
+be queried.
 
 |=======================================================================
 
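Per the deprecation note above, `default_field` replaces `all_fields`; a minimal sketch of the suggested replacement:

[source,js]
--------------------------------
GET /_search
{
  "query": {
    "query_string": {
      "default_field": "*",
      "query": "foo eggplant"
    }
  }
}
--------------------------------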
@@ -68,8 +68,7 @@ Defaults to `true`.
 
 |`all_fields` | deprecated[6.0.0, set `fields` to `*` instead]
 Perform the query on all fields detected in the mapping that can
-be queried. Will be used by default when the `_all` field is disabled and no
-`default_field` is specified index settings, and no `fields` are specified.
+be queried.
 |=======================================================================
 
 [float]
@@ -143,11 +143,11 @@ This will yield the same result as the previous request.
 
 `df`::
     The default field to use when no field prefix is defined within
-    the query. Defaults to _all field.
+    the query.
 
 `analyzer`::
     The analyzer name to be used when analyzing the query
-    string. Defaults to the analyzer of the _all field.
+    string. Defaults to the default search analyzer.
 
 `analyze_wildcard`::
     Should wildcard and prefix queries be analyzed or
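With no `_all`-backed default, a URI search can name the target field explicitly via `df`; a minimal sketch (index and field names illustrative):

[source,js]
--------------------------------
GET /my_index/_search?q=foo&df=content
--------------------------------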
@@ -11,9 +11,6 @@ Highlighting requires the actual content of a field. If the field is not
 stored (the mapping does not set `store` to `true`), the actual `_source` is
 loaded and the relevant field is extracted from `_source`.
 
-NOTE: The `_all` field cannot be extracted from `_source`, so it can only
-be used for highlighting if it is explicitly stored.
-
 For example, to get highlights for the `content` field in each search hit
 using the default highlighter, include a `highlight` object in
 the request body that specifies the `content` field:
@@ -267,7 +264,7 @@ GET /_search
         "number_of_fragments" : 3,
         "fragment_size" : 150,
         "fields" : {
-            "_all" : { "pre_tags" : ["<em>"], "post_tags" : ["</em>"] },
+            "body" : { "pre_tags" : ["<em>"], "post_tags" : ["</em>"] },
             "blog.title" : { "number_of_fragments" : 0 },
             "blog.author" : { "number_of_fragments" : 0 },
             "blog.comment" : { "number_of_fragments" : 5, "order" : "score" }
@@ -392,7 +389,7 @@ GET /_search
         "pre_tags" : ["<tag1>"],
         "post_tags" : ["</tag1>"],
         "fields" : {
-            "_all" : {}
+            "body" : {}
         }
     }
 }
@@ -414,7 +411,7 @@ GET /_search
         "pre_tags" : ["<tag1>", "<tag2>"],
         "post_tags" : ["</tag1>", "</tag2>"],
         "fields" : {
-            "_all" : {}
+            "body" : {}
         }
     }
 }
@@ -484,7 +481,7 @@ GET /_search
     "highlight" : {
         "require_field_match": false,
         "fields": {
-            "_all" : { "pre_tags" : ["<em>"], "post_tags" : ["</em>"] }
+            "body" : { "pre_tags" : ["<em>"], "post_tags" : ["</em>"] }
         }
     }
 }
@@ -719,7 +716,7 @@ GET /_search
     },
     "highlight" : {
         "fields" : {
-            "_all" : {},
+            "body" : {},
             "blog.title" : {"number_of_fragments" : 0}
         }
     }
@@ -24,8 +24,6 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.lucene.all.AllEntries;
-import org.elasticsearch.common.lucene.all.AllTokenStream;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
@@ -75,10 +73,7 @@ public class CompoundAnalysisTests extends ESTestCase {
         IndexAnalyzers indexAnalyzers = analysisModule.getAnalysisRegistry().build(idxSettings);
         Analyzer analyzer = indexAnalyzers.get(analyzerName).analyzer();
 
-        AllEntries allEntries = new AllEntries();
-        allEntries.addText("field1", text, 1.0f);
-
-        TokenStream stream = AllTokenStream.allTokenStream("_all", text, 1.0f, analyzer);
+        TokenStream stream = analyzer.tokenStream("" , text);
         stream.reset();
         CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
 
@@ -109,7 +109,6 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
         parser = indexService.mapperService().documentMapperParser();
 
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("_all").field("enabled", false).endObject()
             .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
             .endObject().endObject().string();
 