Mappings: Remove type level default analyzers

closes #8874
Ryan Ernst 2015-01-08 09:08:14 -08:00
parent 6f894b1d2c
commit cff0ec3972
17 changed files with 157 additions and 261 deletions
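
For context, this change removes the legacy type-level default analyzer settings (analyzer, index_analyzer, search_analyzer, search_quote_analyzer declared on the mapping type itself); analyzers are now declared per field, or index-wide through analysis settings. A minimal sketch of the old and new forms, in the same elasticsearch-py style as the backcompat test below (index, type, and field names are illustrative; a local node is assumed):

from elasticsearch import Elasticsearch

client = Elasticsearch(['localhost:9200'])

# Legacy form (pre-2.0 indices only): defaults declared on the type itself.
legacy_mappings = {
    'type1': {
        'index_analyzer': 'standard',
        'search_analyzer': 'keyword',
        'search_quote_analyzer': 'english',
    }
}

# Current form: analyzers belong to individual fields.
field_level_mappings = {
    'type1': {
        'properties': {
            'body': {'type': 'string', 'analyzer': 'standard'},
        }
    }
}

client.indices.create(index='test', body={'mappings': field_level_mappings})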

View File

@@ -131,16 +131,32 @@ def create_client(http_port, timeout=30):
     time.sleep(1)
   assert False, 'Timed out waiting for node for %s seconds' % timeout

-def generate_index(client):
+def generate_index(client, version):
   client.indices.delete(index='test', ignore=404)
   num_shards = random.randint(1, 10)
   num_replicas = random.randint(0, 1)
   logging.info('Create single shard test index')
+
+  mappings = {}
+  if not version.startswith('2.'):
+    # TODO: we need better "before/onOr/after" logic in python
+    # backcompat test for legacy type level analyzer settings, see #8874
+    mappings['analyzer_type1'] = {
+      'analyzer': 'standard',
+    }
+    mappings['analyzer_type2'] = {
+      'index_analyzer': 'standard',
+      'search_analyzer': 'keyword',
+      'search_quote_analyzer': 'english',
+    }
+
   client.indices.create(index='test', body={
     'settings': {
       'number_of_shards': 1,
       'number_of_replicas': 0
-    }
+    },
+    'mappings': mappings
   })
   health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
   assert health['timed_out'] == False, 'cluster health timed out %s' % health
@@ -152,32 +168,32 @@ def generate_index(client):

 def snapshot_index(client, cfg):
   # Add bogus persistent settings to make sure they can be restored
-  client.cluster.put_settings(body = {
+  client.cluster.put_settings(body={
     'persistent': {
-      'cluster.routing.allocation.exclude.version_attr' : cfg.version
+      'cluster.routing.allocation.exclude.version_attr': cfg.version
     }
   })
-  client.indices.put_template(name = 'template_' + cfg.version.lower(), order = 0, body = {
-    "template" : "te*",
-    "settings" : {
+  client.indices.put_template(name='template_' + cfg.version.lower(), order=0, body={
+    "template": "te*",
+    "settings": {
       "number_of_shards" : 1
     },
-    "mappings" : {
-      "type1" : {
-        "_source" : { "enabled" : False }
+    "mappings": {
+      "type1": {
+        "_source": { "enabled" : False }
       }
     },
-    "aliases" : {
-      "alias1" : {},
-      "alias2" : {
-        "filter" : {
-          "term" : {"version" : cfg.version }
+    "aliases": {
+      "alias1": {},
+      "alias2": {
+        "filter": {
+          "term": {"version" : cfg.version }
         },
-        "routing" : "kimchy"
+        "routing": "kimchy"
       },
-      "{index}-alias" : {}
+      "{index}-alias": {}
     }
-  });
+  })
   client.snapshot.create_repository(repository='test_repo', body={
     'type': 'fs',
     'settings': {
@@ -243,7 +259,7 @@ def main():
   try:
     node = start_node(cfg.version, cfg.release_dir, cfg.data_dir, cfg.tcp_port, cfg.http_port)
     client = create_client(cfg.http_port)
-    generate_index(client)
+    generate_index(client, cfg.version)
    if cfg.snapshot_supported:
      snapshot_index(client, cfg)
  finally:

View File

@@ -286,6 +286,7 @@ public interface Engine extends Closeable {
    static abstract class IndexingOperation implements Operation {
        private final DocumentMapper docMapper;
+        private final Analyzer analyzer;
        private final Term uid;
        private final ParsedDocument doc;

        private long version;
@@ -296,8 +297,9 @@ public interface Engine extends Closeable {
        private final long startTime;
        private long endTime;

-        public IndexingOperation(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
+        public IndexingOperation(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
            this.docMapper = docMapper;
+            this.analyzer = analyzer;
            this.uid = uid;
            this.doc = doc;
            this.version = version;
@@ -307,8 +309,8 @@ public interface Engine extends Closeable {
            this.canHaveDuplicates = canHaveDuplicates;
        }

-        public IndexingOperation(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
-            this(docMapper, uid, doc, Versions.MATCH_ANY, VersionType.INTERNAL, Origin.PRIMARY, System.nanoTime(), true);
+        public IndexingOperation(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc) {
+            this(docMapper, analyzer, uid, doc, Versions.MATCH_ANY, VersionType.INTERNAL, Origin.PRIMARY, System.nanoTime(), true);
        }

        public DocumentMapper docMapper() {
@@ -374,7 +376,7 @@ public interface Engine extends Closeable {
        }

        public Analyzer analyzer() {
-            return docMapper.mappers().indexAnalyzer();
+            return this.analyzer;
        }

        public BytesReference source() {
@@ -403,17 +405,17 @@ public interface Engine extends Closeable {
    static final class Create extends IndexingOperation {
        private final boolean autoGeneratedId;

-        public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates, boolean autoGeneratedId) {
-            super(docMapper, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
+        public Create(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates, boolean autoGeneratedId) {
+            super(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
            this.autoGeneratedId = autoGeneratedId;
        }

-        public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
-            this(docMapper, uid, doc, version, versionType, origin, startTime, true, false);
+        public Create(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
+            this(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, true, false);
        }

-        public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
-            super(docMapper, uid, doc);
+        public Create(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc) {
+            super(docMapper, analyzer, uid, doc);
            autoGeneratedId = false;
        }
@@ -431,16 +433,16 @@ public interface Engine extends Closeable {
    static final class Index extends IndexingOperation {
        private boolean created;

-        public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
-            super(docMapper, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
+        public Index(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
+            super(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
        }

-        public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
-            super(docMapper, uid, doc, version, versionType, origin, startTime, true);
+        public Index(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
+            super(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, true);
        }

-        public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
-            super(docMapper, uid, doc);
+        public Index(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc) {
+            super(docMapper, analyzer, uid, doc);
        }

        @Override

View File

@@ -24,6 +24,7 @@ import com.google.common.collect.Collections2;
 import com.google.common.collect.ForwardingSet;
 import com.google.common.collect.Maps;
 import org.apache.lucene.analysis.Analyzer;
+import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.analysis.FieldNameAnalyzer;

 import java.util.Collection;
@@ -42,8 +43,10 @@ public final class DocumentFieldMappers extends ForwardingSet<FieldMapper<?>> {
    private final FieldNameAnalyzer searchAnalyzer;
    private final FieldNameAnalyzer searchQuoteAnalyzer;

-    public DocumentFieldMappers(DocumentMapper docMapper) {
-        this(new FieldMappersLookup(), new FieldNameAnalyzer(docMapper.indexAnalyzer()), new FieldNameAnalyzer(docMapper.searchAnalyzer()), new FieldNameAnalyzer(docMapper.searchQuotedAnalyzer()));
+    public DocumentFieldMappers(AnalysisService analysisService) {
+        this(new FieldMappersLookup(), new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
+             new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
+             new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
    }

    private DocumentFieldMappers(FieldMappersLookup fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
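
With DocumentFieldMappers now seeded from AnalysisService rather than the DocumentMapper, the per-type defaults disappear; index-wide defaults remain configurable through the reserved 'default' and 'default_search' analyzer names in the analysis settings. A hedged sketch, reusing the client from the earlier example (analyzer choices are illustrative):

client.indices.create(index='test', body={
    'settings': {
        'analysis': {
            'analyzer': {
                'default': {'type': 'standard'},
                'default_search': {'type': 'keyword'},
            }
        }
    }
})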

View File

@@ -166,12 +166,6 @@ public class DocumentMapper implements ToXContent {

        private Map<Class<? extends RootMapper>, RootMapper> rootMappers = new LinkedHashMap<>();

-        private NamedAnalyzer indexAnalyzer;
-        private NamedAnalyzer searchAnalyzer;
-        private NamedAnalyzer searchQuoteAnalyzer;
-
        private List<SourceTransform> sourceTransforms;

        private final String index;
@@ -228,36 +222,6 @@ public class DocumentMapper implements ToXContent {
            return this;
        }

-        public Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
-            this.indexAnalyzer = indexAnalyzer;
-            return this;
-        }
-
-        public boolean hasIndexAnalyzer() {
-            return indexAnalyzer != null;
-        }
-
-        public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
-            this.searchAnalyzer = searchAnalyzer;
-            if (this.searchQuoteAnalyzer == null) {
-                this.searchQuoteAnalyzer = searchAnalyzer;
-            }
-            return this;
-        }
-
-        public Builder searchQuoteAnalyzer(NamedAnalyzer searchQuoteAnalyzer) {
-            this.searchQuoteAnalyzer = searchQuoteAnalyzer;
-            return this;
-        }
-
-        public boolean hasSearchAnalyzer() {
-            return searchAnalyzer != null;
-        }
-
-        public boolean hasSearchQuoteAnalyzer() {
-            return searchQuoteAnalyzer != null;
-        }
-
        public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map<String, Object> parameters) {
            if (sourceTransforms == null) {
                sourceTransforms = new ArrayList<>();
@@ -268,8 +232,7 @@ public class DocumentMapper implements ToXContent {
        public DocumentMapper build(DocumentMapperParser docMapperParser) {
            Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
-            return new DocumentMapper(index, indexSettings, docMapperParser, rootObjectMapper, meta,
-                    indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, rootMappers, sourceTransforms);
+            return new DocumentMapper(index, indexSettings, docMapperParser, rootObjectMapper, meta, rootMappers, sourceTransforms);
        }
    }
@@ -300,11 +263,6 @@ public class DocumentMapper implements ToXContent {
    private final RootMapper[] rootMappersOrdered;
    private final RootMapper[] rootMappersNotIncludedInObject;

-    private final NamedAnalyzer indexAnalyzer;
-    private final NamedAnalyzer searchAnalyzer;
-    private final NamedAnalyzer searchQuoteAnalyzer;
-
    private volatile DocumentFieldMappers fieldMappers;

    private volatile ImmutableMap<String, ObjectMapper> objectMappers = ImmutableMap.of();
@@ -324,7 +282,6 @@ public class DocumentMapper implements ToXContent {
    public DocumentMapper(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser,
                          RootObjectMapper rootObjectMapper,
                          ImmutableMap<String, Object> meta,
-                          NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, NamedAnalyzer searchQuoteAnalyzer,
                          Map<Class<? extends RootMapper>, RootMapper> rootMappers, List<SourceTransform> sourceTransforms) {
        this.index = index;
        this.indexSettings = indexSettings;
@@ -345,10 +302,6 @@ public class DocumentMapper implements ToXContent {
        }
        this.rootMappersNotIncludedInObject = rootMappersNotIncludedInObjectLst.toArray(new RootMapper[rootMappersNotIncludedInObjectLst.size()]);

-        this.indexAnalyzer = indexAnalyzer;
-        this.searchAnalyzer = searchAnalyzer;
-        this.searchQuoteAnalyzer = searchQuoteAnalyzer != null ? searchQuoteAnalyzer : searchAnalyzer;
-
        this.typeFilter = typeMapper().termFilter(type, null);

        if (rootMapper(ParentFieldMapper.class).active()) {
@@ -370,7 +323,7 @@ public class DocumentMapper implements ToXContent {
        // now traverse and get all the statically defined ones
        rootObjectMapper.traverse(fieldMappersAgg);

-        this.fieldMappers = new DocumentFieldMappers(this).copyAndAllAll(fieldMappersAgg.mappers);
+        this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(fieldMappersAgg.mappers);

        final Map<String, ObjectMapper> objectMappers = Maps.newHashMap();
        rootObjectMapper.traverse(new ObjectMapperListener() {
@@ -470,18 +423,6 @@ public class DocumentMapper implements ToXContent {
        return rootMapper(BoostFieldMapper.class);
    }

-    public Analyzer indexAnalyzer() {
-        return this.indexAnalyzer;
-    }
-
-    public Analyzer searchAnalyzer() {
-        return this.searchAnalyzer;
-    }
-
-    public Analyzer searchQuotedAnalyzer() {
-        return this.searchQuoteAnalyzer;
-    }
-
    public Filter typeFilter() {
        return this.typeFilter;
    }
@@ -765,23 +706,6 @@ public class DocumentMapper implements ToXContent {
        rootObjectMapper.toXContent(builder, params, new ToXContent() {
            @Override
            public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-                if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_")) {
-                    if (!indexAnalyzer.name().equals("default")) {
-                        // same analyzers, output it once
-                        builder.field("analyzer", indexAnalyzer.name());
-                    }
-                } else {
-                    if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
-                        if (!indexAnalyzer.name().equals("default")) {
-                            builder.field("index_analyzer", indexAnalyzer.name());
-                        }
-                    }
-                    if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
-                        if (!searchAnalyzer.name().equals("default")) {
-                            builder.field("search_analyzer", searchAnalyzer.name());
-                        }
-                    }
-                }
                if (sourceTransforms != null) {
                    if (sourceTransforms.size() == 1) {
                        builder.field("transform");

View File

@@ -79,6 +79,8 @@ import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptService.ScriptType;

+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -244,36 +246,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
            String fieldName = Strings.toUnderscoreCase(entry.getKey());
            Object fieldNode = entry.getValue();

-            if ("index_analyzer".equals(fieldName)) {
-                iterator.remove();
-                NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for index_analyzer setting on root type [" + type + "]");
-                }
-                docBuilder.indexAnalyzer(analyzer);
-            } else if ("search_analyzer".equals(fieldName)) {
-                iterator.remove();
-                NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for search_analyzer setting on root type [" + type + "]");
-                }
-                docBuilder.searchAnalyzer(analyzer);
-            } else if ("search_quote_analyzer".equals(fieldName)) {
-                iterator.remove();
-                NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for search_analyzer setting on root type [" + type + "]");
-                }
-                docBuilder.searchQuoteAnalyzer(analyzer);
-            } else if ("analyzer".equals(fieldName)) {
-                iterator.remove();
-                NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-                if (analyzer == null) {
-                    throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for analyzer setting on root type [" + type + "]");
-                }
-                docBuilder.indexAnalyzer(analyzer);
-                docBuilder.searchAnalyzer(analyzer);
-            } else if ("transform".equals(fieldName)) {
+            if ("transform".equals(fieldName)) {
                iterator.remove();
                if (fieldNode instanceof Map) {
                    parseTransform(docBuilder, (Map<String, Object>) fieldNode, parserContext.indexVersionCreated());
@@ -307,16 +280,6 @@ public class DocumentMapperParser extends AbstractIndexComponent {

        checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");

-        if (!docBuilder.hasIndexAnalyzer()) {
-            docBuilder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
-        }
-        if (!docBuilder.hasSearchAnalyzer()) {
-            docBuilder.searchAnalyzer(analysisService.defaultSearchAnalyzer());
-        }
-        if (!docBuilder.hasSearchQuoteAnalyzer()) {
-            docBuilder.searchAnalyzer(analysisService.defaultSearchQuoteAnalyzer());
-        }
-
        DocumentMapper documentMapper = docBuilder.build(this);
        // update the source with the generated one
        documentMapper.refreshSource();
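
Because the parser no longer consumes root-level analyzer keys, on newly created indices they fall through to the checkNoRemainingFields(...) guard above and the mapping is rejected. A hedged sketch of the expected failure, again in elasticsearch-py style (error text paraphrased):

from elasticsearch import Elasticsearch
from elasticsearch.exceptions import RequestError

client = Elasticsearch(['localhost:9200'])
try:
    client.indices.create(index='bad', body={
        'mappings': {'type1': {'analyzer': 'standard'}}
    })
except RequestError as e:
    # Expected to surface a MapperParsingException along the lines of:
    # "Root mapping definition has unsupported parameters: ..."
    print(e)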

View File

@@ -1040,9 +1040,6 @@ public class MapperService extends AbstractIndexComponent {
                    return analyzer;
                }
            }
-            if (docMapper != null && docMapper.searchAnalyzer() != null) {
-                return docMapper.searchAnalyzer();
-            }
            return mapperService.searchAnalyzer();
        }

@@ -1053,9 +1050,6 @@ public class MapperService extends AbstractIndexComponent {
                    return analyzer;
                }
            }
-            if (docMapper != null && docMapper.searchQuotedAnalyzer() != null) {
-                return docMapper.searchQuotedAnalyzer();
-            }
            return mapperService.searchQuoteAnalyzer();
        }
    }

View File

@@ -400,7 +400,7 @@ public class IndexShard extends AbstractIndexShardComponent {
        long startTime = System.nanoTime();
        Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(source.type());
        ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
-        return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates, autoGeneratedId);
+        return new Engine.Create(docMapper.v1(), docMapper.v1().mappers().indexAnalyzer(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates, autoGeneratedId);
    }

    public ParsedDocument create(Engine.Create create) throws ElasticsearchException {
@@ -424,7 +424,7 @@ public class IndexShard extends AbstractIndexShardComponent {
        long startTime = System.nanoTime();
        Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(source.type());
        ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
-        return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates);
+        return new Engine.Index(docMapper.v1(), docMapper.v1().mappers().indexAnalyzer(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates);
    }

    public ParsedDocument index(Engine.Index index) throws ElasticsearchException {

View File

@ -101,10 +101,7 @@ import static org.hamcrest.Matchers.*;
public class InternalEngineTests extends ElasticsearchLuceneTestCase { public class InternalEngineTests extends ElasticsearchLuceneTestCase {
protected final ShardId shardId = new ShardId(new Index("index"), 1); protected final ShardId shardId = new ShardId(new Index("index"), 1);
protected final DocumentMapper fakeType = new DocumentMapper.Builder("type", protected final Analyzer analyzer = Lucene.STANDARD_ANALYZER;
ImmutableSettings.settingsBuilder().put("index.version.created", Version.CURRENT).build(),
new RootObjectMapper.Builder("")).
indexAnalyzer(Lucene.STANDARD_ANALYZER).build(null);
protected ThreadPool threadPool; protected ThreadPool threadPool;
@ -256,10 +253,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// create a doc and refresh // create a doc and refresh
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false); ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
engine.create(new Engine.Create(fakeType, newUid("2"), doc2)); engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.refresh("test", false); engine.refresh("test", false);
segments = engine.segments(false); segments = engine.segments(false);
@ -292,7 +289,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build()); engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build());
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false); ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
engine.create(new Engine.Create(fakeType, newUid("3"), doc3)); engine.create(new Engine.Create(null, analyzer, newUid("3"), doc3));
engine.refresh("test", false); engine.refresh("test", false);
segments = engine.segments(false); segments = engine.segments(false);
@ -339,7 +336,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true).build()); engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true).build());
ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, false); ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
engine.create(new Engine.Create(fakeType, newUid("4"), doc4)); engine.create(new Engine.Create(null, analyzer, newUid("4"), doc4));
engine.refresh("test", false); engine.refresh("test", false);
segments = engine.segments(false); segments = engine.segments(false);
@ -370,7 +367,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
assertThat(segments.isEmpty(), equalTo(true)); assertThat(segments.isEmpty(), equalTo(true));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.refresh("test", false); engine.refresh("test", false);
segments = engine.segments(true); segments = engine.segments(true);
@ -378,10 +375,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
assertThat(segments.get(0).ramTree, notNullValue()); assertThat(segments.get(0).ramTree, notNullValue());
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false); ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
engine.create(new Engine.Create(fakeType, newUid("2"), doc2)); engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.refresh("test", false); engine.refresh("test", false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false); ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
engine.create(new Engine.Create(fakeType, newUid("3"), doc3)); engine.create(new Engine.Create(null, analyzer, newUid("3"), doc3));
engine.refresh("test", false); engine.refresh("test", false);
segments = engine.segments(true); segments = engine.segments(true);
@ -421,11 +418,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
final Engine engine = createEngine(engineSettingsService, store, createTranslog(), mergeSchedulerProvider); final Engine engine = createEngine(engineSettingsService, store, createTranslog(), mergeSchedulerProvider);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
assertThat(engine.segments(false).size(), equalTo(1)); assertThat(engine.segments(false).size(), equalTo(1));
index = new Engine.Index(fakeType, newUid("2"), doc); index = new Engine.Index(null, analyzer, newUid("2"), doc);
engine.index(index); engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
List<Segment> segments = engine.segments(false); List<Segment> segments = engine.segments(false);
@ -433,7 +430,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
for (Segment segment : segments) { for (Segment segment : segments) {
assertThat(segment.getMergeId(), nullValue()); assertThat(segment.getMergeId(), nullValue());
} }
index = new Engine.Index(fakeType, newUid("3"), doc); index = new Engine.Index(null, analyzer, newUid("3"), doc);
engine.index(index); engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
segments = engine.segments(false); segments = engine.segments(false);
@ -453,7 +450,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
waitForMerge.get().countDown(); waitForMerge.get().countDown();
index = new Engine.Index(fakeType, newUid("4"), doc); index = new Engine.Index(null, analyzer, newUid("4"), doc);
engine.index(index); engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration(); final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration();
@ -503,7 +500,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
Document document = testDocumentWithTextField(); Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
// its not there... // its not there...
searchResult = engine.acquireSearcher("test"); searchResult = engine.acquireSearcher("test");
@ -542,7 +539,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document.add(new TextField("value", "test1", Field.Store.YES)); document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
engine.index(new Engine.Index(fakeType, newUid("1"), doc)); engine.index(new Engine.Index(null, analyzer, newUid("1"), doc));
// its not updated yet... // its not updated yet...
searchResult = engine.acquireSearcher("test"); searchResult = engine.acquireSearcher("test");
@ -595,7 +592,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document = testDocumentWithTextField(); document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
// its not there... // its not there...
searchResult = engine.acquireSearcher("test"); searchResult = engine.acquireSearcher("test");
@ -629,7 +626,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document = testDocument(); document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES)); document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
engine.index(new Engine.Index(fakeType, newUid("1"), doc)); engine.index(new Engine.Index(null, analyzer, newUid("1"), doc));
// its not updated yet... // its not updated yet...
searchResult = engine.acquireSearcher("test"); searchResult = engine.acquireSearcher("test");
@ -658,7 +655,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// create a document // create a document
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
// its not there... // its not there...
searchResult = engine.acquireSearcher("test"); searchResult = engine.acquireSearcher("test");
@ -691,7 +688,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testFailEngineOnCorruption() { public void testFailEngineOnCorruption() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
final boolean failEngine = defaultSettings.getAsBoolean(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, false); final boolean failEngine = defaultSettings.getAsBoolean(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, false);
final int failInPhase = randomIntBetween(1, 3); final int failInPhase = randomIntBetween(1, 3);
@ -729,7 +726,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
searchResult.close(); searchResult.close();
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false); ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
engine.create(new Engine.Create(fakeType, newUid("2"), doc2)); engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.refresh("foo", false); engine.refresh("foo", false);
searchResult = engine.acquireSearcher("test"); searchResult = engine.acquireSearcher("test");
@ -746,7 +743,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testSimpleRecover() throws Exception { public void testSimpleRecover() throws Exception {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
engine.recover(new Engine.RecoveryHandler() { engine.recover(new Engine.RecoveryHandler() {
@ -791,10 +788,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception { public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc1)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc1));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false); ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
engine.create(new Engine.Create(fakeType, newUid("2"), doc2)); engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.recover(new Engine.RecoveryHandler() { engine.recover(new Engine.RecoveryHandler() {
@Override @Override
@ -822,10 +819,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception { public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false); ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
engine.create(new Engine.Create(fakeType, newUid("1"), doc1)); engine.create(new Engine.Create(null, analyzer, newUid("1"), doc1));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false); ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
engine.create(new Engine.Create(fakeType, newUid("2"), doc2)); engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.recover(new Engine.RecoveryHandler() { engine.recover(new Engine.RecoveryHandler() {
@Override @Override
@ -841,7 +838,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// add for phase3 // add for phase3
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false); ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
engine.create(new Engine.Create(fakeType, newUid("3"), doc3)); engine.create(new Engine.Create(null, analyzer, newUid("3"), doc3));
} }
@Override @Override
@ -860,11 +857,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testVersioningNewCreate() { public void testVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc); Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc);
engine.create(create); engine.create(create);
assertThat(create.version(), equalTo(1l)); assertThat(create.version(), equalTo(1l));
create = new Engine.Create(fakeType, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); create = new Engine.Create(null, analyzer, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.create(create); replicaEngine.create(create);
assertThat(create.version(), equalTo(1l)); assertThat(create.version(), equalTo(1l));
} }
@ -872,11 +869,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testExternalVersioningNewCreate() { public void testExternalVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0); Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
engine.create(create); engine.create(create);
assertThat(create.version(), equalTo(12l)); assertThat(create.version(), equalTo(12l));
create = new Engine.Create(fakeType, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); create = new Engine.Create(null, analyzer, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.create(create); replicaEngine.create(create);
assertThat(create.version(), equalTo(12l)); assertThat(create.version(), equalTo(12l));
} }
@ -884,11 +881,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testVersioningNewIndex() { public void testVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(1l)); assertThat(index.version(), equalTo(1l));
index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index); replicaEngine.index(index);
assertThat(index.version(), equalTo(1l)); assertThat(index.version(), equalTo(1l));
} }
@ -896,11 +893,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testExternalVersioningNewIndex() { public void testExternalVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(12l)); assertThat(index.version(), equalTo(12l));
index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index); replicaEngine.index(index);
assertThat(index.version(), equalTo(12l)); assertThat(index.version(), equalTo(12l));
} }
@ -908,15 +905,15 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testVersioningIndexConflict() { public void testVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(1l)); assertThat(index.version(), equalTo(1l));
index = new Engine.Index(fakeType, newUid("1"), doc); index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(2l)); assertThat(index.version(), equalTo(2l));
index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -925,7 +922,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
} }
// future versions should not work as well // future versions should not work as well
index = new Engine.Index(fakeType, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -937,15 +934,15 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testExternalVersioningIndexConflict() { public void testExternalVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(12l)); assertThat(index.version(), equalTo(12l));
index = new Engine.Index(fakeType, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(14l)); assertThat(index.version(), equalTo(14l));
index = new Engine.Index(fakeType, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -957,17 +954,17 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testVersioningIndexConflictWithFlush() { public void testVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(1l)); assertThat(index.version(), equalTo(1l));
index = new Engine.Index(fakeType, newUid("1"), doc); index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(2l)); assertThat(index.version(), equalTo(2l));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -976,7 +973,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
} }
// future versions should not work as well // future versions should not work as well
index = new Engine.Index(fakeType, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -988,17 +985,17 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testExternalVersioningIndexConflictWithFlush() { public void testExternalVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(12l)); assertThat(index.version(), equalTo(12l));
index = new Engine.Index(fakeType, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(14l)); assertThat(index.version(), equalTo(14l));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
index = new Engine.Index(fakeType, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -1010,11 +1007,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testVersioningDeleteConflict() { public void testVersioningDeleteConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(1l)); assertThat(index.version(), equalTo(1l));
index = new Engine.Index(fakeType, newUid("1"), doc); index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(2l)); assertThat(index.version(), equalTo(2l));
@ -1041,7 +1038,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
assertThat(delete.version(), equalTo(3l)); assertThat(delete.version(), equalTo(3l));
// now check if we can index to a delete doc with version // now check if we can index to a delete doc with version
index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -1050,7 +1047,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
} }
// we shouldn't be able to create as well // we shouldn't be able to create as well
Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0); Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.create(create); engine.create(create);
} catch (VersionConflictEngineException e) { } catch (VersionConflictEngineException e) {
@ -1061,11 +1058,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test @Test
public void testVersioningDeleteConflictWithFlush() { public void testVersioningDeleteConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc); Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(1l)); assertThat(index.version(), equalTo(1l));
index = new Engine.Index(fakeType, newUid("1"), doc); index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index); engine.index(index);
assertThat(index.version(), equalTo(2l)); assertThat(index.version(), equalTo(2l));
@ -1098,7 +1095,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false); engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
// now check if we can index to a delete doc with version // now check if we can index to a delete doc with version
index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0); index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.index(index); engine.index(index);
fail(); fail();
@ -1107,7 +1104,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
} }
// we shouldn't be able to create as well // we shouldn't be able to create as well
Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0); Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try { try {
engine.create(create); engine.create(create);
} catch (VersionConflictEngineException e) { } catch (VersionConflictEngineException e) {
@@ -1118,11 +1115,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testVersioningCreateExistsException() {
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         engine.create(create);
         assertThat(create.version(), equalTo(1l));
-        create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.create(create);
             fail();
@@ -1134,13 +1131,13 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testVersioningCreateExistsExceptionWithFlush() {
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-        Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         engine.create(create);
         assertThat(create.version(), equalTo(1l));
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+        create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
         try {
             engine.create(create);
             fail();
@@ -1152,21 +1149,21 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testVersioningReplicaConflict1() {
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+        Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
-        index = new Engine.Index(fakeType, newUid("1"), doc);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
         // apply the second index to the replica, should work fine
-        index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.index(index);
         assertThat(index.version(), equalTo(2l));
         // now, the old one should not work
-        index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
         try {
             replicaEngine.index(index);
             fail();
@@ -1176,7 +1173,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // second version on replica should fail as well
         try {
-            index = new Engine.Index(fakeType, newUid("1"), doc, 2l
+            index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l
                     , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
             replicaEngine.index(index);
             assertThat(index.version(), equalTo(2l));
@@ -1188,18 +1185,18 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testVersioningReplicaConflict2() {
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+        Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(1l));
         // apply the first index to the replica, should work fine
-        index = new Engine.Index(fakeType, newUid("1"), doc, 1l
+        index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l
                 , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
         replicaEngine.index(index);
         assertThat(index.version(), equalTo(1l));
         // index it again
-        index = new Engine.Index(fakeType, newUid("1"), doc);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertThat(index.version(), equalTo(2l));
@@ -1226,7 +1223,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // now do the second index on the replica, it should fail
         try {
-            index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+            index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
             replicaEngine.index(index);
             fail("excepted VersionConflictEngineException to be thrown");
         } catch (VersionConflictEngineException e) {
@@ -1238,17 +1235,17 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testBasicCreatedFlag() {
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+        Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());
-        index = new Engine.Index(fakeType, newUid("1"), doc);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertFalse(index.created());
         engine.delete(new Engine.Delete(null, "1", newUid("1")));
-        index = new Engine.Index(fakeType, newUid("1"), doc);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());
     }
@@ -1256,7 +1253,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
     @Test
     public void testCreatedFlagAfterFlush() {
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-        Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+        Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());
@@ -1264,7 +1261,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-        index = new Engine.Index(fakeType, newUid("1"), doc);
+        index = new Engine.Index(null, analyzer, newUid("1"), doc);
         engine.index(index);
         assertTrue(index.created());
     }
@@ -1312,13 +1309,13 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         try {
             // First, with DEBUG, which should NOT log IndexWriter output:
             ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-            engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+            engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertFalse(mockAppender.sawIndexWriterMessage);
             // Again, with TRACE, which should log IndexWriter output:
             rootLogger.setLevel(Level.TRACE);
-            engine.create(new Engine.Create(fakeType, newUid("2"), doc));
+            engine.create(new Engine.Create(null, analyzer, newUid("2"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertTrue(mockAppender.sawIndexWriterMessage);
@@ -1347,14 +1344,14 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         try {
             // First, with DEBUG, which should NOT log IndexWriter output:
             ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-            engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+            engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertFalse(mockAppender.sawIndexWriterMessage);
             assertFalse(mockAppender.sawIndexWriterIFDMessage);
             // Again, with TRACE, which should only log IndexWriter IFD output:
             iwIFDLogger.setLevel(Level.TRACE);
-            engine.create(new Engine.Create(fakeType, newUid("2"), doc));
+            engine.create(new Engine.Create(null, analyzer, newUid("2"), doc));
             engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
             assertFalse(mockAppender.sawIndexWriterMessage);
             assertTrue(mockAppender.sawIndexWriterIFDMessage);
@@ -1384,7 +1381,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         document.add(new TextField("value", "test1", Field.Store.YES));
         ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
-        engine.index(new Engine.Index(fakeType, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
+        engine.index(new Engine.Index(null, analyzer, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
         // Delete document we just added:
         engine.delete(new Engine.Delete("test", "1", newUid("1"), 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
@@ -1409,7 +1406,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // Try to index uid=1 with a too-old version, should fail:
         try {
-            engine.index(new Engine.Index(fakeType, newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
+            engine.index(new Engine.Index(null, analyzer, newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
             fail("did not hit expected exception");
         } catch (VersionConflictEngineException vcee) {
             // expected
@@ -1421,7 +1418,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
         // Try to index uid=2 with a too-old version, should fail:
         try {
-            engine.index(new Engine.Index(fakeType, newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
+            engine.index(new Engine.Index(null, analyzer, newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
             fail("did not hit expected exception");
         } catch (VersionConflictEngineException vcee) {
             // expected
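
The hunks above all make one mechanical substitution: with type-level default analyzers gone, the engine tests stop routing analysis through a fake type-level DocumentMapper (`fakeType`) and instead hand the analyzer straight to each `Engine.Index`/`Engine.Create` operation, leaving the mapper argument `null`. A minimal sketch of the new pattern, assuming the InternalEngineTests fixtures used above (`engine`, `analyzer`, `newUid`, `testParsedDocument`, `testDocument`, `B_1`):

    // Build a parsed doc with the test helpers, then index it twice under the same uid;
    // the analyzer is now an explicit constructor argument rather than mapper state.
    ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
    Engine.Index first = new Engine.Index(null, analyzer, newUid("1"), doc);
    engine.index(first);
    assertThat(first.version(), equalTo(1l));   // first write creates version 1
    Engine.Index second = new Engine.Index(null, analyzer, newUid("1"), doc);
    engine.index(second);
    assertThat(second.version(), equalTo(2l));  // same uid, version bumps to 2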
@@ -419,9 +419,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
         rootTypes.put(SourceFieldMapper.NAME, "{\"enabled\" : true}");
         rootTypes.put(TypeFieldMapper.NAME, "{\"store\" : true}");
         rootTypes.put("include_in_all", "true");
-        rootTypes.put("index_analyzer", "\"standard\"");
-        rootTypes.put("search_analyzer", "\"standard\"");
-        rootTypes.put("analyzer", "\"standard\"");
         rootTypes.put("dynamic_date_formats", "[\"yyyy-MM-dd\", \"dd-MM-yyyy\"]");
         rootTypes.put("numeric_detection", "true");
         rootTypes.put("dynamic_templates", "[]");
@@ -148,10 +148,10 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
     }

     private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException {
-        assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.indexAnalyzer(), null), notNullValue());
-        assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.indexAnalyzer(), null), notNullValue());
-        TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.indexAnalyzer(), null);
+        assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
+        assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
+        TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null);
         tokenStream.reset();
         NumericTermAttribute nta = tokenStream.addAttribute(NumericTermAttribute.class);
         List<Long> values = new ArrayList<>();
@@ -159,7 +159,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
             values.add(nta.getRawValue());
         }
-        tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.indexAnalyzer(), null);
+        tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null);
         tokenStream.reset();
         nta = tokenStream.addAttribute(NumericTermAttribute.class);
         int pos = 0;
@@ -184,7 +184,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
                 .bytes());
-        assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.indexAnalyzer(), null), notNullValue());
+        assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
     }

     @Test
@@ -58,8 +58,8 @@ public class DoubleIndexingDocTest extends ElasticsearchSingleNodeLuceneTestCase
                 .endObject()
                 .bytes());
-        writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
-        writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
+        writer.addDocument(doc.rootDoc(), mapper.mappers().indexAnalyzer());
+        writer.addDocument(doc.rootDoc(), mapper.mappers().indexAnalyzer());
         IndexReader reader = DirectoryReader.open(writer, true);
         IndexSearcher searcher = new IndexSearcher(reader);
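
The remaining mapper test files all change the same call site: the index analyzer is no longer exposed on `DocumentMapper` itself, and callers reach it through the `mappers()` view instead. A sketch of the relocated accessor, assuming the DoubleIndexingDocTest fixtures above (`writer`, `doc`, `mapper`):

    // Old call path (removed along with the type-level defaults):
    //   writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
    // New call path: the index analyzer now hangs off mapper.mappers().
    Analyzer indexAnalyzer = mapper.mappers().indexAnalyzer();  // org.apache.lucene.analysis.Analyzer
    writer.addDocument(doc.rootDoc(), indexAnalyzer);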
@@ -73,7 +73,7 @@ public class StoredNumericValuesTest extends ElasticsearchSingleNodeTest {
                 .endObject()
                 .bytes());
-        writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
+        writer.addDocument(doc.rootDoc(), mapper.mappers().indexAnalyzer());
         // Indexing a doc in the old way
         FieldType fieldType = new FieldType();
@@ -47,7 +47,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
         ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
         assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(false));
-        assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+        assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
     }

     @Test
@@ -65,7 +65,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
         ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
         assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true));
-        assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+        assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
     }

     @Test
@@ -85,7 +85,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
         assertThat(doc.rootDoc().getField("_timestamp").fieldType().stored(), equalTo(true));
         assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_timestamp").fieldType().indexOptions());
-        assertThat(doc.rootDoc().getField("_timestamp").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+        assertThat(doc.rootDoc().getField("_timestamp").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
     }

     @Test
@@ -69,7 +69,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         assertThat(doc.rootDoc().getField("_ttl").fieldType().stored(), equalTo(true));
         assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_ttl").fieldType().indexOptions());
-        assertThat(doc.rootDoc().getField("_ttl").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+        assertThat(doc.rootDoc().getField("_ttl").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
     }

     @Test