commit 385c43c141
parent 537769c225

@@ -15,8 +15,6 @@ include::fields/source-field.asciidoc[]
 
 include::fields/all-field.asciidoc[]
 
-include::fields/analyzer-field.asciidoc[]
-
 include::fields/parent-field.asciidoc[]
 
 include::fields/field-names-field.asciidoc[]

@@ -1,41 +0,0 @@
-[[mapping-analyzer-field]]
-=== `_analyzer`
-
-The `_analyzer` mapping allows you to use a document field value as the
-name of the analyzer that will be used to index the document. The
-analyzer is used for any field that does not explicitly define an
-`analyzer` or `index_analyzer` at indexing time.
-
-Here is a simple mapping:
-
-[source,js]
---------------------------------------------------
-{
-    "type1" : {
-        "_analyzer" : {
-            "path" : "my_field"
-        }
-    }
-}
---------------------------------------------------
-
-The above uses the value of the `my_field` field to look up an analyzer
-registered under that name. For example, indexing the following doc:
-
-[source,js]
---------------------------------------------------
-{
-    "my_field" : "whitespace"
-}
---------------------------------------------------
-
-will cause the `whitespace` analyzer to be used as the index analyzer
-for all fields without an explicit analyzer setting.
-
-The default path value is `_analyzer`, so the analyzer can be chosen for
-a specific document by setting the `_analyzer` field in it. If a custom
-JSON field name is needed, an explicit mapping with a different path
-should be set.
-
-By default, the `_analyzer` field is indexed; this can be disabled by
-setting `index` to `no` in the mapping.

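Note: with `_analyzer` gone, analyzer choice is fixed in the mapping rather than read from each document. A minimal sketch of the replacement approach, pinning the `whitespace` analyzer from the deleted example to a field (the `type1`/`body` names are illustrative, not part of the commit):

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class PerFieldAnalyzerMappingExample {
    // Builds a mapping that fixes the analyzer at mapping time; previously
    // the _analyzer root mapping could override this per document.
    public static XContentBuilder mapping() throws IOException {
        return XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("type1")
                        .startObject("properties")
                            .startObject("body")
                                .field("type", "string")
                                .field("analyzer", "whitespace") // chosen once, per field
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject();
    }
}
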
@@ -374,7 +374,7 @@ public interface Engine extends Closeable {
         }
 
         public Analyzer analyzer() {
-            return this.doc.analyzer();
+            return docMapper.mappers().indexAnalyzer();
         }
 
         public BytesReference source() {

@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;

@@ -45,11 +44,28 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.StringAndBytesText;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.xcontent.*;
-import org.elasticsearch.common.xcontent.smile.SmileXContent;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
-import org.elasticsearch.index.mapper.internal.*;
+import org.elasticsearch.index.mapper.internal.AllFieldMapper;
+import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
+import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
+import org.elasticsearch.index.mapper.internal.IdFieldMapper;
+import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
+import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
+import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
+import org.elasticsearch.index.mapper.internal.SizeFieldMapper;
+import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
+import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
+import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
+import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
+import org.elasticsearch.index.mapper.internal.UidFieldMapper;
+import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;
 import org.elasticsearch.script.ExecutableScript;

@@ -57,7 +73,14 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptService.ScriptType;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
 
 import static com.google.common.collect.Lists.newArrayList;

@@ -184,7 +207,6 @@ public class DocumentMapper implements ToXContent {
         this.rootMappers.put(IndexFieldMapper.class, new IndexFieldMapper());
         this.rootMappers.put(SourceFieldMapper.class, new SourceFieldMapper(indexSettings));
         this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper());
-        this.rootMappers.put(AnalyzerMapper.class, new AnalyzerMapper());
         this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper());
         this.rootMappers.put(BoostFieldMapper.class, new BoostFieldMapper(indexSettings));
         this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings));

@@ -408,10 +430,6 @@ public class DocumentMapper implements ToXContent {
         return rootMapper(SourceFieldMapper.class);
     }
 
-    public AnalyzerMapper analyzerMapper() {
-        return rootMapper(AnalyzerMapper.class);
-    }
-
     public AllFieldMapper allFieldMapper() {
         return rootMapper(AllFieldMapper.class);
     }

@@ -578,7 +596,7 @@ public class DocumentMapper implements ToXContent {
             }
         }
 
-        ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(), context.analyzer(),
+        ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(),
                 context.source(), context.mappingsModified()).parent(source.parent());
         // reset the context to free up memory
         context.reset(null, null, null, null);

@@ -56,7 +56,6 @@ import org.elasticsearch.index.mapper.core.TypeParsers;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
-import org.elasticsearch.index.mapper.internal.AnalyzerMapper;
 import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
 import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
 import org.elasticsearch.index.mapper.internal.IdFieldMapper;

@@ -147,7 +146,6 @@ public class DocumentMapperParser extends AbstractIndexComponent {
                 .put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser())
                 .put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser())
                 .put(AllFieldMapper.NAME, new AllFieldMapper.TypeParser())
-                .put(AnalyzerMapper.NAME, new AnalyzerMapper.TypeParser())
                 .put(BoostFieldMapper.NAME, new BoostFieldMapper.TypeParser())
                 .put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser())
                 .put(RoutingFieldMapper.NAME, new RoutingFieldMapper.TypeParser())

@@ -97,10 +97,6 @@ public final class MapperBuilders {
         return new AllFieldMapper.Builder();
     }
 
-    public static AnalyzerMapper.Builder analyzer() {
-        return new AnalyzerMapper.Builder();
-    }
-
     public static RootObjectMapper.Builder rootObject(String name) {
         return new RootObjectMapper.Builder(name);
     }

@@ -354,16 +354,6 @@ public abstract class ParseContext {
             return in.allEntries();
         }
 
-        @Override
-        public Analyzer analyzer() {
-            return in.analyzer();
-        }
-
-        @Override
-        public void analyzer(Analyzer analyzer) {
-            in.analyzer(analyzer);
-        }
-
         @Override
         public boolean externalValueSet() {
             return in.externalValueSet();

@@ -405,8 +395,6 @@
 
         private List<Document> documents = Lists.newArrayList();
 
-        private Analyzer analyzer;
-
         private final String index;
 
         @Nullable

@@ -449,7 +437,6 @@
             } else {
                 this.documents = null;
             }
-            this.analyzer = null;
             this.uid = null;
             this.version = null;
             this.id = null;

@@ -600,14 +587,6 @@
             return this.allEntries;
         }
 
-        public Analyzer analyzer() {
-            return this.analyzer;
-        }
-
-        public void analyzer(Analyzer analyzer) {
-            this.analyzer = analyzer;
-        }
-
         public float docBoost() {
             return this.docBoost;
         }

@@ -789,10 +768,6 @@
 
     public abstract AllEntries allEntries();
 
-    public abstract Analyzer analyzer();
-
-    public abstract void analyzer(Analyzer analyzer);
-
     /**
      * Return a new context that will have the external value set.
      */

@@ -46,15 +46,13 @@ public class ParsedDocument {
 
     private final List<Document> documents;
 
-    private final Analyzer analyzer;
-
     private BytesReference source;
 
     private boolean mappingsModified;
 
     private String parent;
 
-    public ParsedDocument(Field uid, Field version, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, Analyzer analyzer, BytesReference source, boolean mappingsModified) {
+    public ParsedDocument(Field uid, Field version, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, BytesReference source, boolean mappingsModified) {
         this.uid = uid;
         this.version = version;
         this.id = id;

@@ -64,7 +62,6 @@ public class ParsedDocument {
         this.ttl = ttl;
         this.documents = documents;
         this.source = source;
-        this.analyzer = analyzer;
         this.mappingsModified = mappingsModified;
     }
 

@@ -104,10 +101,6 @@ public class ParsedDocument {
         return this.documents;
     }
 
-    public Analyzer analyzer() {
-        return this.analyzer;
-    }
-
     public BytesReference source() {
         return this.source;
     }

@@ -236,13 +236,10 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
     private Analyzer findAnalyzer(ParseContext context) {
         Analyzer analyzer = indexAnalyzer;
         if (analyzer == null) {
-            analyzer = context.analyzer();
+            analyzer = context.docMapper().mappers().indexAnalyzer();
             if (analyzer == null) {
-                analyzer = context.docMapper().indexAnalyzer();
-                if (analyzer == null) {
-                    // This should not happen, should we log warn it?
-                    analyzer = Lucene.STANDARD_ANALYZER;
-                }
+                // This should not happen, should we log warn it?
+                analyzer = Lucene.STANDARD_ANALYZER;
             }
         }
         return analyzer;

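Note: the `_all` analyzer fallback above collapses from three levels to two: an explicitly configured `_all` analyzer still wins, otherwise the mapping's index analyzer is used, with Lucene's standard analyzer as a defensive last resort. A hedged sketch of that logic in isolation (the class, method, and parameter names are illustrative, not ES API):

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.common.lucene.Lucene;

public final class AnalyzerFallbackSketch {
    // Mirrors the simplified findAnalyzer: explicit analyzer wins, then the
    // mapping's index analyzer, then the standard analyzer as a default.
    public static Analyzer resolve(Analyzer explicit, Analyzer mappingIndexAnalyzer) {
        if (explicit != null) {
            return explicit;
        }
        return mappingIndexAnalyzer != null ? mappingIndexAnalyzer : Lucene.STANDARD_ANALYZER;
    }
}
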
@@ -1,189 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper.internal;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.*;
-import org.elasticsearch.search.highlight.HighlighterContext;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static org.elasticsearch.index.mapper.MapperBuilders.analyzer;
-
-/**
- *
- */
-public class AnalyzerMapper implements Mapper, InternalMapper, RootMapper {
-
-    public static final String NAME = "_analyzer";
-    public static final String CONTENT_TYPE = "_analyzer";
-
-    public static class Defaults {
-        public static final String PATH = "_analyzer";
-    }
-
-    public static class Builder extends Mapper.Builder<Builder, AnalyzerMapper> {
-
-        private String field = Defaults.PATH;
-
-        public Builder() {
-            super(CONTENT_TYPE);
-            this.builder = this;
-        }
-
-        public Builder field(String field) {
-            this.field = field;
-            return this;
-        }
-
-        @Override
-        public AnalyzerMapper build(BuilderContext context) {
-            return new AnalyzerMapper(field);
-        }
-    }
-
-    public static class TypeParser implements Mapper.TypeParser {
-        @Override
-        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            AnalyzerMapper.Builder builder = analyzer();
-            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
-                Map.Entry<String, Object> entry = iterator.next();
-                String fieldName = Strings.toUnderscoreCase(entry.getKey());
-                Object fieldNode = entry.getValue();
-                if (fieldName.equals("path")) {
-                    builder.field(fieldNode.toString());
-                    iterator.remove();
-                }
-            }
-            return builder;
-        }
-    }
-
-    private final String path;
-
-    public AnalyzerMapper() {
-        this(Defaults.PATH);
-    }
-
-    public AnalyzerMapper(String path) {
-        this.path = path.intern();
-    }
-
-    @Override
-    public String name() {
-        return CONTENT_TYPE;
-    }
-
-    @Override
-    public void preParse(ParseContext context) throws IOException {
-    }
-
-    @Override
-    public void postParse(ParseContext context) throws IOException {
-        Analyzer analyzer = context.docMapper().mappers().indexAnalyzer();
-        if (path != null) {
-            String value = null;
-            List<IndexableField> fields = context.doc().getFields();
-            for (int i = 0, fieldsSize = fields.size(); i < fieldsSize; i++) {
-                IndexableField field = fields.get(i);
-                if (field.name().equals(path)) {
-                    value = field.stringValue();
-                    break;
-                }
-            }
-            if (value == null) {
-                value = context.ignoredValue(path);
-            }
-            if (value != null) {
-                analyzer = context.analysisService().analyzer(value);
-                if (analyzer == null) {
-                    throw new MapperParsingException("No analyzer found for [" + value + "] from path [" + path + "]");
-                }
-                analyzer = context.docMapper().mappers().indexAnalyzer(analyzer);
-            }
-        }
-        context.analyzer(analyzer);
-    }
-
-    @Override
-    public boolean includeInObject() {
-        return false;
-    }
-
-    public Analyzer setAnalyzer(HighlighterContext context) {
-        if (context.analyzer() != null) {
-            return context.analyzer();
-        }
-
-        Analyzer analyzer = null;
-
-        if (path != null) {
-            String analyzerName = (String) context.context.lookup().source().extractValue(path);
-            analyzer = context.context.mapperService().analysisService().analyzer(analyzerName);
-        }
-
-        if (analyzer == null) {
-            analyzer = context.context.mapperService().documentMapper(context.hitContext.hit().type()).mappers().indexAnalyzer();
-        }
-        context.analyzer(analyzer);
-
-        return analyzer;
-    }
-
-    @Override
-    public void parse(ParseContext context) throws IOException {
-    }
-
-    @Override
-    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
-    }
-
-    @Override
-    public void traverse(FieldMapperListener fieldMapperListener) {
-    }
-
-    @Override
-    public void traverse(ObjectMapperListener objectMapperListener) {
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (path.equals(Defaults.PATH)) {
-            return builder;
-        }
-        builder.startObject(CONTENT_TYPE);
-        if (!path.equals(Defaults.PATH)) {
-            builder.field("path", path);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public void close() {
-
-    }
-}

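Note: with `AnalyzerMapper` deleted, every remaining consumer (engine, percolator, highlighter) resolves the index-time analyzer the same way, as the hunks below show. A small helper capturing that shared pattern, assuming a `MapperService` in scope (the helper class itself is illustrative, not part of the commit):

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.index.mapper.MapperService;

public final class IndexAnalyzerLookup {
    // The one remaining resolution path: the analyzer always comes from the
    // type's mapping, never from the document being indexed.
    public static Analyzer forType(MapperService mapperService, String type) {
        return mapperService.documentMapper(type).mappers().indexAnalyzer();
    }
}
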
@@ -65,7 +65,8 @@ class MultiDocumentPercolatorIndex implements PercolatorIndex {
             } else {
                 memoryIndex = new MemoryIndex(true);
             }
-            memoryIndices[i] = indexDoc(d, parsedDocument.analyzer(), memoryIndex).createSearcher().getIndexReader();
+            Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer();
+            memoryIndices[i] = indexDoc(d, analyzer, memoryIndex).createSearcher().getIndexReader();
         }
         MultiReader mReader = new MultiReader(memoryIndices, true);
         try {

@@ -20,6 +20,7 @@
 
 package org.elasticsearch.percolator;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;

@@ -54,9 +55,10 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex {
                 continue;
             }
             try {
+                Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer();
                 // TODO: instead of passing null here, we can have a CTL<Map<String,TokenStream>> and pass previous,
                 // like the indexer does
-                TokenStream tokenStream = field.tokenStream(parsedDocument.analyzer(), null);
+                TokenStream tokenStream = field.tokenStream(analyzer, null);
                 if (tokenStream != null) {
                     memoryIndex.addField(field.name(), tokenStream, field.boost());
                 }

@@ -35,7 +35,6 @@ public class HighlighterContext {
     public final SearchContext context;
     public final FetchSubPhase.HitContext hitContext;
     public final HighlightQuery query;
-    private Analyzer analyzer;
 
     public HighlighterContext(String fieldName, SearchContextHighlight.Field field, FieldMapper<?> mapper, SearchContext context,
                               FetchSubPhase.HitContext hitContext, HighlightQuery query) {

@@ -70,12 +69,4 @@ public class HighlighterContext {
         return query;
     }
 
-    public Analyzer analyzer() {
-        return this.analyzer;
-    }
-
-    public void analyzer(Analyzer analyzer) {
-        this.analyzer = analyzer;
-    }
-
 }

@@ -30,7 +30,6 @@ import org.elasticsearch.ElasticsearchIllegalArgumentException;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.mapper.FieldMapper;
-import org.elasticsearch.index.mapper.internal.AnalyzerMapper;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;

@@ -99,10 +98,7 @@ public class PlainHighlighter implements Highlighter {
         int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
         ArrayList<TextFragment> fragsList = new ArrayList<>();
         List<Object> textsToHighlight;
-
-        AnalyzerMapper analyzerMapper = context.mapperService().documentMapper(hitContext.hit().type()).analyzerMapper();
-
-        Analyzer analyzer = analyzerMapper.setAnalyzer(highlighterContext);
+        Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().type()).mappers().indexAnalyzer();
 
         try {
             textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext);

@@ -52,10 +52,13 @@ import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
 import org.elasticsearch.index.engine.*;
 import org.elasticsearch.index.indexing.ShardIndexingService;
 import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
+import org.elasticsearch.index.mapper.object.RootObjectMapper;
 import org.elasticsearch.index.merge.OnGoingMerge;
 import org.elasticsearch.index.merge.policy.LogByteSizeMergePolicyProvider;
 import org.elasticsearch.index.merge.policy.MergePolicyProvider;

@@ -98,6 +101,10 @@ import static org.hamcrest.Matchers.*;
 public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     protected final ShardId shardId = new ShardId(new Index("index"), 1);
+    protected final DocumentMapper fakeType = new DocumentMapper.Builder("type",
+            ImmutableSettings.settingsBuilder().put("index.version.created", Version.CURRENT).build(),
+            new RootObjectMapper.Builder("")).
+            indexAnalyzer(Lucene.STANDARD_ANALYZER).build(null);
 
     protected ThreadPool threadPool;
 

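Note: since `Engine.Create`/`Engine.Index` can no longer be handed `null` for the mapper (the engine now asks it for the analyzer), the tests build the minimal `fakeType` mapper above once and thread it through every operation. Its construction, repeated as a standalone sketch with the imports it relies on:

import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;

public class FakeTypeExample {
    // A mapper with just enough state for the engine to resolve an index
    // analyzer; these tests need no real field mappings.
    static final DocumentMapper FAKE_TYPE = new DocumentMapper.Builder("type",
            ImmutableSettings.settingsBuilder().put("index.version.created", Version.CURRENT).build(),
            new RootObjectMapper.Builder(""))
            .indexAnalyzer(Lucene.STANDARD_ANALYZER)
            .build(null);
}
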
@@ -161,12 +168,12 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     }
 
 
-    private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, Analyzer analyzer, BytesReference source, boolean mappingsModified) {
+    private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, BytesReference source, boolean mappingsModified) {
         Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
         Field versionField = new NumericDocValuesField("_version", 0);
         document.add(uidField);
         document.add(versionField);
-        return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), analyzer, source, mappingsModified);
+        return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsModified);
     }
 
     protected Store createStore() throws IOException {

@@ -248,11 +255,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         final boolean defaultCompound = defaultSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true);
 
         // create a doc and refresh
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
 
-        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.create(new Engine.Create(null, newUid("2"), doc2));
+        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
+        engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
         engine.refresh("test", false);
 
         segments = engine.segments(false);

@@ -284,8 +291,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
         engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build());
 
-        ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false);
-        engine.create(new Engine.Create(null, newUid("3"), doc3));
+        ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
+        engine.create(new Engine.Create(fakeType, newUid("3"), doc3));
         engine.refresh("test", false);
 
         segments = engine.segments(false);

@@ -331,8 +338,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         assertThat(segments.get(1).isCompound(), equalTo(false));
 
         engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true).build());
-        ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false);
-        engine.create(new Engine.Create(null, newUid("4"), doc4));
+        ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
+        engine.create(new Engine.Create(fakeType, newUid("4"), doc4));
         engine.refresh("test", false);
 
         segments = engine.segments(false);

@@ -362,19 +369,19 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         List<Segment> segments = engine.segments(true);
         assertThat(segments.isEmpty(), equalTo(true));
 
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
         engine.refresh("test", false);
 
         segments = engine.segments(true);
         assertThat(segments.size(), equalTo(1));
         assertThat(segments.get(0).ramTree, notNullValue());
 
-        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.create(new Engine.Create(null, newUid("2"), doc2));
+        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
+        engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
         engine.refresh("test", false);
-        ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false);
-        engine.create(new Engine.Create(null, newUid("3"), doc3));
+        ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
+        engine.create(new Engine.Create(fakeType, newUid("3"), doc3));
         engine.refresh("test", false);
 
         segments = engine.segments(true);

@@ -413,12 +420,12 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         });
 
         final Engine engine = createEngine(engineSettingsService, store, createTranslog(), mergeSchedulerProvider);
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
         assertThat(engine.segments(false).size(), equalTo(1));
-        index = new Engine.Index(null, newUid("2"), doc);
+        index = new Engine.Index(fakeType, newUid("2"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
         List<Segment> segments = engine.segments(false);

@@ -426,7 +433,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         for (Segment segment : segments) {
             assertThat(segment.getMergeId(), nullValue());
         }
-        index = new Engine.Index(null, newUid("3"), doc);
+        index = new Engine.Index(fakeType, newUid("3"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
         segments = engine.segments(false);

@@ -446,7 +453,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
         waitForMerge.get().countDown();
 
-        index = new Engine.Index(null, newUid("4"), doc);
+        index = new Engine.Index(fakeType, newUid("4"), doc);
         engine.index(index);
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
         final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration();

@@ -495,8 +502,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // create a document
         Document document = testDocumentWithTextField();
         document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
 
         // its not there...
         searchResult = engine.acquireSearcher("test");

@@ -534,8 +541,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         document = testDocument();
         document.add(new TextField("value", "test1", Field.Store.YES));
         document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
-        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.index(new Engine.Index(null, newUid("1"), doc));
+        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
+        engine.index(new Engine.Index(fakeType, newUid("1"), doc));
 
         // its not updated yet...
         searchResult = engine.acquireSearcher("test");

@@ -587,8 +594,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // add it back
         document = testDocumentWithTextField();
         document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
-        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
 
         // its not there...
         searchResult = engine.acquireSearcher("test");

@@ -621,8 +628,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // now do an update
         document = testDocument();
         document.add(new TextField("value", "test1", Field.Store.YES));
-        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.index(new Engine.Index(null, newUid("1"), doc));
+        doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
+        engine.index(new Engine.Index(fakeType, newUid("1"), doc));
 
         // its not updated yet...
         searchResult = engine.acquireSearcher("test");

@@ -650,8 +657,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         searchResult.close();
 
         // create a document
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
 
         // its not there...
         searchResult = engine.acquireSearcher("test");

@@ -683,8 +690,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testFailEngineOnCorruption() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
         final boolean failEngine = defaultSettings.getAsBoolean(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, false);
         final int failInPhase = randomIntBetween(1, 3);

@@ -721,8 +728,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
         searchResult.close();
 
-        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.create(new Engine.Create(null, newUid("2"), doc2));
+        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
+        engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
         engine.refresh("foo", false);
 
         searchResult = engine.acquireSearcher("test");

@@ -738,8 +745,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testSimpleRecover() throws Exception {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc));
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
 
         engine.recover(new Engine.RecoveryHandler() {

@@ -783,11 +790,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
    public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
-        ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc1));
+        ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc1));
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.create(new Engine.Create(null, newUid("2"), doc2));
+        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
+        engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
 
         engine.recover(new Engine.RecoveryHandler() {
             @Override

@@ -814,11 +821,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
-        ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-        engine.create(new Engine.Create(null, newUid("1"), doc1));
+        ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+        engine.create(new Engine.Create(fakeType, newUid("1"), doc1));
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.create(new Engine.Create(null, newUid("2"), doc2));
+        ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
+        engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
 
         engine.recover(new Engine.RecoveryHandler() {
             @Override

@@ -833,8 +840,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
             assertThat(create.source().toBytesArray(), equalTo(B_2));
 
             // add for phase3
-            ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_3, false);
-            engine.create(new Engine.Create(null, newUid("3"), doc3));
+            ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
+            engine.create(new Engine.Create(fakeType, newUid("3"), doc3));
         }
 
         @Override

@@ -852,64 +859,64 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningNewCreate() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Create create = new Engine.Create(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc);
         engine.create(create);
         assertThat(create.version(), equalTo(1l));
 
-        create = new Engine.Create(null, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        create = new Engine.Create(fakeType, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.create(create);
         assertThat(create.version(), equalTo(1l));
     }
 
     @Test
     public void testExternalVersioningNewCreate() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Create create = new Engine.Create(null, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
         engine.create(create);
         assertThat(create.version(), equalTo(12l));
 
-        create = new Engine.Create(null, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        create = new Engine.Create(fakeType, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.create(create);
         assertThat(create.version(), equalTo(12l));
     }
 
     @Test
     public void testVersioningNewIndex() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
 
-        index = new Engine.Index(null, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.index(index);
         assertThat(index.version(), equalTo(1l));
     }
 
     @Test
     public void testExternalVersioningNewIndex() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
         engine.index(index);
         assertThat(index.version(), equalTo(12l));
 
-        index = new Engine.Index(null, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.index(index);
         assertThat(index.version(), equalTo(12l));
     }
 
     @Test
     public void testVersioningIndexConflict() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
 
-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
 
-        index = new Engine.Index(null, newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -918,7 +925,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         }
 
         // future versions should not work as well
-        index = new Engine.Index(null, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -929,16 +936,16 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testExternalVersioningIndexConflict() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
         engine.index(index);
         assertThat(index.version(), equalTo(12l));
 
-        index = new Engine.Index(null, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
         engine.index(index);
         assertThat(index.version(), equalTo(14l));
 
-        index = new Engine.Index(null, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -949,18 +956,18 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningIndexConflictWithFlush() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
 
-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
 
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
 
-        index = new Engine.Index(null, newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -969,7 +976,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         }
 
         // future versions should not work as well
-        index = new Engine.Index(null, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -980,18 +987,18 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testExternalVersioningIndexConflictWithFlush() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
         engine.index(index);
         assertThat(index.version(), equalTo(12l));
 
-        index = new Engine.Index(null, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
         engine.index(index);
         assertThat(index.version(), equalTo(14l));
 
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
 
-        index = new Engine.Index(null, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -1002,12 +1009,12 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningDeleteConflict() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
 
-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
 

@@ -1034,7 +1041,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         assertThat(delete.version(), equalTo(3l));
 
         // now check if we can index to a delete doc with version
-        index = new Engine.Index(null, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -1043,7 +1050,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         }
 
         // we shouldn't be able to create as well
-        Engine.Create create = new Engine.Create(null, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.create(create);
         } catch (VersionConflictEngineException e) {

@@ -1053,12 +1060,12 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningDeleteConflictWithFlush() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
 
-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
 

@@ -1091,7 +1098,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
 
         // now check if we can index to a delete doc with version
-        index = new Engine.Index(null, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.index(index);
             fail();

@@ -1100,7 +1107,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         }
 
         // we shouldn't be able to create as well
-        Engine.Create create = new Engine.Create(null, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.create(create);
         } catch (VersionConflictEngineException e) {

@@ -1110,12 +1117,12 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningCreateExistsException() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Create create = new Engine.Create(null, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         engine.create(create);
         assertThat(create.version(), equalTo(1l));
 
-        create = new Engine.Create(null, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.create(create);
             fail();

@@ -1126,14 +1133,14 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningCreateExistsExceptionWithFlush() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Create create = new Engine.Create(null, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         engine.create(create);
         assertThat(create.version(), equalTo(1l));
 
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
 
-        create = new Engine.Create(null, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.create(create);
             fail();

@@ -1144,22 +1151,22 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
     @Test
     public void testVersioningReplicaConflict1() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
 
-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
 
         // apply the second index to the replica, should work fine
-        index = new Engine.Index(null, newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.index(index);
         assertThat(index.version(), equalTo(2l));
 
         // now, the old one should not work
-        index = new Engine.Index(null, newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
         try {
             replicaEngine.index(index);
             fail();

@@ -1169,7 +1176,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
 
         // second version on replica should fail as well
         try {
-            index = new Engine.Index(null, newUid("1"), doc, 2l
+            index = new Engine.Index(fakeType, newUid("1"), doc, 2l
                     , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
             replicaEngine.index(index);
             assertThat(index.version(), equalTo(2l));

@@ -1180,19 +1187,19 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

     @Test
     public void testVersioningReplicaConflict2() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));

         // apply the first index to the replica, should work fine
-        index = new Engine.Index(null, newUid("1"), doc, 1l
+        index = new Engine.Index(fakeType, newUid("1"), doc, 1l
                 , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.index(index);
         assertThat(index.version(), equalTo(1l));

         // index it again
-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));

@@ -1219,7 +1226,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

         // now do the second index on the replica, it should fail
         try {
-            index = new Engine.Index(null, newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+            index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
             replicaEngine.index(index);
             fail("excepted VersionConflictEngineException to be thrown");
         } catch (VersionConflictEngineException e) {
@@ -1230,26 +1237,26 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

     @Test
     public void testBasicCreatedFlag() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());

-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertFalse(index.created());

         engine.delete(new Engine.Delete(null, "1", newUid("1")));

-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());
     }

     @Test
     public void testCreatedFlagAfterFlush() {
-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), Lucene.STANDARD_ANALYZER, B_1, false);
-        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
+        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());

@@ -1257,7 +1264,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);

-        index = new Engine.Index(null, newUid("1"), doc);
+        index = new Engine.Index(fakeType, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());
     }

@@ -1304,14 +1311,14 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

         try {
             // First, with DEBUG, which should NOT log IndexWriter output:
-            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-            engine.create(new Engine.Create(null, newUid("1"), doc));
+            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+            engine.create(new Engine.Create(fakeType, newUid("1"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertFalse(mockAppender.sawIndexWriterMessage);

             // Again, with TRACE, which should log IndexWriter output:
             rootLogger.setLevel(Level.TRACE);
-            engine.create(new Engine.Create(null, newUid("2"), doc));
+            engine.create(new Engine.Create(fakeType, newUid("2"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertTrue(mockAppender.sawIndexWriterMessage);

@@ -1339,15 +1346,15 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

         try {
             // First, with DEBUG, which should NOT log IndexWriter output:
-            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), Lucene.STANDARD_ANALYZER, B_1, false);
-            engine.create(new Engine.Create(null, newUid("1"), doc));
+            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
+            engine.create(new Engine.Create(fakeType, newUid("1"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertFalse(mockAppender.sawIndexWriterMessage);
             assertFalse(mockAppender.sawIndexWriterIFDMessage);

             // Again, with TRACE, which should only log IndexWriter IFD output:
             iwIFDLogger.setLevel(Level.TRACE);
-            engine.create(new Engine.Create(null, newUid("2"), doc));
+            engine.create(new Engine.Create(fakeType, newUid("2"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertFalse(mockAppender.sawIndexWriterMessage);
             assertTrue(mockAppender.sawIndexWriterIFDMessage);
@@ -1376,8 +1383,8 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         Document document = testDocument();
         document.add(new TextField("value", "test1", Field.Store.YES));

-        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_2, false);
-        engine.index(new Engine.Index(null, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
+        engine.index(new Engine.Index(fakeType, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));

         // Delete document we just added:
         engine.delete(new Engine.Delete("test", "1", newUid("1"), 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
@@ -1402,7 +1409,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

         // Try to index uid=1 with a too-old version, should fail:
         try {
-            engine.index(new Engine.Index(null, newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
+            engine.index(new Engine.Index(fakeType, newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
             fail("did not hit expected exception");
         } catch (VersionConflictEngineException vcee) {
             // expected
@@ -1414,7 +1421,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {

         // Try to index uid=2 with a too-old version, should fail:
         try {
-            engine.index(new Engine.Index(null, newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
+            engine.index(new Engine.Index(fakeType, newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
             fail("did not hit expected exception");
         } catch (VersionConflictEngineException vcee) {
             // expected

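The recurring `null` -> `fakeType` substitution in the hunks above follows from the engine change: `ParsedDocument` no longer carries an analyzer, so a create/index operation resolves its index analyzer through the operation's `DocumentMapper` (`docMapper.mappers().indexAnalyzer()`). A minimal sketch of how such a stub mapper could be produced, reusing only parser calls that appear in the tests in this commit; this is an assumption about the fixture, not the commit's actual `fakeType` construction, and `createIndex(...).mapperService()` presumes a single-node test harness:

[source,java]
--------------------------------------------------
// Hypothetical fixture: parse an empty type mapping so that
// mappers().indexAnalyzer() resolves to the index's default analyzer,
// replacing the analyzer that ParsedDocument used to carry.
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper fakeType = parser.parse(
        XContentFactory.jsonBuilder().startObject().startObject("type")
                .endObject().endObject().string());
--------------------------------------------------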
@@ -1,164 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper.analyzer;
-
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.analysis.FieldNameAnalyzer;
-import org.elasticsearch.index.analysis.NamedAnalyzer;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.test.ElasticsearchSingleNodeTest;
-import org.junit.Test;
-
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.nullValue;
-
-/**
- *
- */
-public class AnalyzerMapperTests extends ElasticsearchSingleNodeTest {
-
-    @Test
-    public void testAnalyzerMapping() throws Exception {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_analyzer").field("path", "field_analyzer").endObject()
-                .startObject("properties")
-                .startObject("field1").field("type", "string").endObject()
-                .startObject("field2").field("type", "string").field("analyzer", "simple").endObject()
-                .endObject()
-                .endObject().endObject().string();
-
-        DocumentMapper documentMapper = parser.parse(mapping);
-
-        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field_analyzer", "whitespace")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        FieldNameAnalyzer analyzer = (FieldNameAnalyzer) doc.analyzer();
-        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field1")), nullValue());
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
-
-        // check that it serializes and de-serializes correctly
-
-        DocumentMapper reparsedMapper = parser.parse(documentMapper.mappingSource().string());
-
-        doc = reparsedMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field_analyzer", "whitespace")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        analyzer = (FieldNameAnalyzer) doc.analyzer();
-        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field1")), nullValue());
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
-    }
-
-
-    @Test
-    public void testAnalyzerMappingExplicit() throws Exception {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_analyzer").field("path", "field_analyzer").endObject()
-                .startObject("properties")
-                .startObject("field_analyzer").field("type", "string").endObject()
-                .startObject("field1").field("type", "string").endObject()
-                .startObject("field2").field("type", "string").field("analyzer", "simple").endObject()
-                .endObject()
-                .endObject().endObject().string();
-
-        DocumentMapper documentMapper = parser.parse(mapping);
-
-        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field_analyzer", "whitespace")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        FieldNameAnalyzer analyzer = (FieldNameAnalyzer) doc.analyzer();
-        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field1")), nullValue());
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
-
-        // check that it serializes and de-serializes correctly
-
-        DocumentMapper reparsedMapper = parser.parse(documentMapper.mappingSource().string());
-
-        doc = reparsedMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field_analyzer", "whitespace")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        analyzer = (FieldNameAnalyzer) doc.analyzer();
-        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field1")), nullValue());
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
-    }
-
-    @Test
-    public void testAnalyzerMappingNotIndexedNorStored() throws Exception {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-
-        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("_analyzer").field("path", "field_analyzer").endObject()
-                .startObject("properties")
-                .startObject("field_analyzer").field("type", "string").field("index", "no").field("store", "no").endObject()
-                .startObject("field1").field("type", "string").endObject()
-                .startObject("field2").field("type", "string").field("analyzer", "simple").endObject()
-                .endObject()
-                .endObject().endObject().string();
-
-        DocumentMapper documentMapper = parser.parse(mapping);
-
-        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field_analyzer", "whitespace")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        FieldNameAnalyzer analyzer = (FieldNameAnalyzer) doc.analyzer();
-        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field1")), nullValue());
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
-
-        // check that it serializes and de-serializes correctly
-
-        DocumentMapper reparsedMapper = parser.parse(documentMapper.mappingSource().string());
-
-        doc = reparsedMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
-                .field("field_analyzer", "whitespace")
-                .field("field1", "value1")
-                .field("field2", "value2")
-                .endObject().bytes());
-
-        analyzer = (FieldNameAnalyzer) doc.analyzer();
-        assertThat(((NamedAnalyzer) analyzer.defaultAnalyzer()).name(), equalTo("whitespace"));
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field1")), nullValue());
-        assertThat(((NamedAnalyzer) analyzer.analyzers().get("field2")).name(), equalTo("simple"));
-    }
-}
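The deleted `AnalyzerMapperTests` above exercised per-document analyzer selection via the `_analyzer` path. After this commit there is no per-document equivalent; analyzers have to be fixed per field in the mapping. A sketch of the explicit replacement, reusing the deleted test's builder calls (choosing `whitespace` for `field1` is purely illustrative, standing in for whatever the document's `field_analyzer` value used to select):

[source,java]
--------------------------------------------------
// Explicit per-field analyzers replace the removed "_analyzer" path lookup.
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
        .startObject("properties")
        .startObject("field1").field("type", "string").field("analyzer", "whitespace").endObject()
        .startObject("field2").field("type", "string").field("analyzer", "simple").endObject()
        .endObject()
        .endObject().endObject().string();

DocumentMapper documentMapper = parser.parse(mapping);
--------------------------------------------------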
@@ -58,8 +58,8 @@ public class DoubleIndexingDocTest extends ElasticsearchSingleNodeLuceneTestCase
                 .endObject()
                 .bytes());

-        writer.addDocument(doc.rootDoc(), doc.analyzer());
-        writer.addDocument(doc.rootDoc(), doc.analyzer());
+        writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
+        writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());

         IndexReader reader = DirectoryReader.open(writer, true);
         IndexSearcher searcher = new IndexSearcher(reader);

@@ -73,7 +73,7 @@ public class StoredNumericValuesTest extends ElasticsearchSingleNodeTest {
                 .endObject()
                 .bytes());

-        writer.addDocument(doc.rootDoc(), doc.analyzer());
+        writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());

         // Indexing a doc in the old way
         FieldType fieldType = new FieldType();

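Both call-site fixes above follow the same pattern: the analyzer handed to `IndexWriter#addDocument` now comes from the `DocumentMapper` that produced the document, not from the `ParsedDocument` itself. Condensed, assuming `mapper` is the mapper that parsed `doc`:

[source,java]
--------------------------------------------------
// Before: the analyzer travelled with each parsed document.
//     writer.addDocument(doc.rootDoc(), doc.analyzer());
// After: it is resolved once from the type's mapping.
writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
--------------------------------------------------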
@@ -647,38 +647,6 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest {
         assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog"));
     }

-    @Test
-    public void testPlainHighlighterDocumentAnalyzer() throws Exception {
-        client().admin().indices().prepareCreate("test")
-        .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
-            .startObject("_analyzer")
-                .field("path", "language_analyzer")
-            .endObject()
-            .startObject("properties")
-                .startObject("language_analyzer")
-                    .field("type", "string")
-                    .field("index", "not_analyzed")
-                .endObject()
-                .startObject("text")
-                    .field("type", "string")
-                .endObject()
-            .endObject()
-            .endObject().endObject()).execute().actionGet();
-        ensureYellow();
-
-        index("test", "type1", "1",
-                "language_analyzer", "english",
-                "text", "Look at me, I'm eating cars.");
-        refresh();
-
-        SearchResponse response = client().prepareSearch("test")
-                .setQuery(QueryBuilders.matchQuery("text", "car"))
-                .addHighlightedField(
-                        new HighlightBuilder.Field("text").preTags("<1>").postTags("</1>").requireFieldMatch(true))
-                .get();
-        assertHighlight(response, 0, "text", 0, 1, equalTo("Look at me, I'm eating <1>cars</1>."));
-    }
-
     @Test
     public void testFastVectorHighlighter() throws Exception {
         assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping()));
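The removed `testPlainHighlighterDocumentAnalyzer` relied on `_analyzer` to pick an analyzer per document at index time. Without the feature, the usual substitute is an explicitly analyzed field (or one field per language); a sketch built only from calls present in the removed test, where the single `english`-analyzed `text` field is an assumed replacement modelling, not code from this commit:

[source,java]
--------------------------------------------------
// The analyzer is fixed in the mapping rather than chosen per document.
client().admin().indices().prepareCreate("test")
        .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                    .startObject("text")
                        .field("type", "string")
                        .field("analyzer", "english")
                    .endObject()
                .endObject()
                .endObject().endObject()).execute().actionGet();
--------------------------------------------------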