Mappings: Remove type level default analyzers

closes #8874
Ryan Ernst 2015-01-08 09:08:14 -08:00
parent 6f894b1d2c
commit cff0ec3972
17 changed files with 157 additions and 261 deletions
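For context, the sketch below contrasts the mapping syntax this commit removes with its per-field replacement. It is illustrative only and not taken from the commit; the index names and the elasticsearch-py client usage are assumptions, mirroring the backcompat test script further down.

from elasticsearch import Elasticsearch

client = Elasticsearch()

# Legacy (pre-2.0) syntax: analyzers declared as type-level defaults.
# These root-level keys are what this commit stops parsing.
client.indices.create(index='old_style', body={
    'mappings': {
        'type1': {
            'analyzer': 'standard',
            'search_quote_analyzer': 'english',
        },
    },
})

# Replacement: declare analyzers per field; index-wide defaults can still
# be configured through analysis settings such as
# index.analysis.analyzer.default (an assumption about the intended usage,
# not something stated in this commit).
client.indices.create(index='new_style', body={
    'mappings': {
        'type1': {
            'properties': {
                'body': {
                    'type': 'string',
                    'analyzer': 'standard',
                    'search_analyzer': 'keyword',
                },
            },
        },
    },
})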

View File

@@ -131,16 +131,32 @@ def create_client(http_port, timeout=30):
time.sleep(1)
assert False, 'Timed out waiting for node for %s seconds' % timeout
-def generate_index(client):
+def generate_index(client, version):
client.indices.delete(index='test', ignore=404)
num_shards = random.randint(1, 10)
num_replicas = random.randint(0, 1)
logging.info('Create single shard test index')
+mappings = {}
+if not version.startswith('2.'):
+# TODO: we need better "before/onOr/after" logic in python
+# backcompat test for legacy type level analyzer settings, see #8874
+mappings['analyzer_type1'] = {
+'analyzer': 'standard',
+}
+mappings['analyzer_type2'] = {
+'index_analyzer': 'standard',
+'search_analyzer': 'keyword',
+'search_quote_analyzer': 'english',
+}
client.indices.create(index='test', body={
'settings': {
'number_of_shards': 1,
'number_of_replicas': 0
-}
+},
+'mappings': mappings
})
health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
assert health['timed_out'] == False, 'cluster health timed out %s' % health
@@ -152,32 +168,32 @@ def generate_index(client):
def snapshot_index(client, cfg):
# Add bogus persistent settings to make sure they can be restored
-client.cluster.put_settings(body = {
+client.cluster.put_settings(body={
'persistent': {
-'cluster.routing.allocation.exclude.version_attr' : cfg.version
+'cluster.routing.allocation.exclude.version_attr': cfg.version
}
})
-client.indices.put_template(name = 'template_' + cfg.version.lower(), order = 0, body = {
-"template" : "te*",
-"settings" : {
+client.indices.put_template(name='template_' + cfg.version.lower(), order=0, body={
+"template": "te*",
+"settings": {
"number_of_shards" : 1
},
-"mappings" : {
-"type1" : {
-"_source" : { "enabled" : False }
+"mappings": {
+"type1": {
+"_source": { "enabled" : False }
}
},
-"aliases" : {
-"alias1" : {},
-"alias2" : {
-"filter" : {
-"term" : {"version" : cfg.version }
+"aliases": {
+"alias1": {},
+"alias2": {
+"filter": {
+"term": {"version" : cfg.version }
},
-"routing" : "kimchy"
+"routing": "kimchy"
},
-"{index}-alias" : {}
+"{index}-alias": {}
}
-});
+})
client.snapshot.create_repository(repository='test_repo', body={
'type': 'fs',
'settings': {
@@ -243,7 +259,7 @@ def main():
try:
node = start_node(cfg.version, cfg.release_dir, cfg.data_dir, cfg.tcp_port, cfg.http_port)
client = create_client(cfg.http_port)
-generate_index(client)
+generate_index(client, cfg.version)
if cfg.snapshot_supported:
snapshot_index(client, cfg)
finally:
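The TODO in generate_index above flags the crude version.startswith('2.') check. A tuple-based comparison is the usual fix; here is a minimal sketch of such a helper (hypothetical, not part of this commit):

def parse_version(version):
    # '1.4.2' -> (1, 4, 2); drops suffixes such as '-SNAPSHOT'
    return tuple(int(p) for p in version.split('-')[0].split('.'))

def version_before(version, other):
    return parse_version(version) < parse_version(other)

# The check above would then read:
# if version_before(version, '2.0.0'):
#     ... add the legacy type-level analyzer mappings ...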

View File

@@ -286,6 +286,7 @@ public interface Engine extends Closeable {
static abstract class IndexingOperation implements Operation {
private final DocumentMapper docMapper;
+private final Analyzer analyzer;
private final Term uid;
private final ParsedDocument doc;
private long version;
@@ -296,8 +297,9 @@ public interface Engine extends Closeable {
private final long startTime;
private long endTime;
-public IndexingOperation(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
+public IndexingOperation(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
this.docMapper = docMapper;
+this.analyzer = analyzer;
this.uid = uid;
this.doc = doc;
this.version = version;
@@ -307,8 +309,8 @@ public interface Engine extends Closeable {
this.canHaveDuplicates = canHaveDuplicates;
}
-public IndexingOperation(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
-this(docMapper, uid, doc, Versions.MATCH_ANY, VersionType.INTERNAL, Origin.PRIMARY, System.nanoTime(), true);
+public IndexingOperation(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc) {
+this(docMapper, analyzer, uid, doc, Versions.MATCH_ANY, VersionType.INTERNAL, Origin.PRIMARY, System.nanoTime(), true);
}
public DocumentMapper docMapper() {
@@ -374,7 +376,7 @@ public interface Engine extends Closeable {
}
public Analyzer analyzer() {
-return docMapper.mappers().indexAnalyzer();
+return this.analyzer;
}
public BytesReference source() {
@@ -403,17 +405,17 @@ public interface Engine extends Closeable {
static final class Create extends IndexingOperation {
private final boolean autoGeneratedId;
-public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates, boolean autoGeneratedId) {
-super(docMapper, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
+public Create(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates, boolean autoGeneratedId) {
+super(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
this.autoGeneratedId = autoGeneratedId;
}
-public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
-this(docMapper, uid, doc, version, versionType, origin, startTime, true, false);
+public Create(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
+this(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, true, false);
}
-public Create(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
-super(docMapper, uid, doc);
+public Create(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc) {
+super(docMapper, analyzer, uid, doc);
autoGeneratedId = false;
}
@@ -431,16 +433,16 @@ public interface Engine extends Closeable {
static final class Index extends IndexingOperation {
private boolean created;
-public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
-super(docMapper, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
+public Index(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
+super(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
}
-public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
-super(docMapper, uid, doc, version, versionType, origin, startTime, true);
+public Index(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
+super(docMapper, analyzer, uid, doc, version, versionType, origin, startTime, true);
}
-public Index(DocumentMapper docMapper, Term uid, ParsedDocument doc) {
-super(docMapper, uid, doc);
+public Index(DocumentMapper docMapper, Analyzer analyzer, Term uid, ParsedDocument doc) {
+super(docMapper, analyzer, uid, doc);
}
@Override

View File

@@ -24,6 +24,7 @@ import com.google.common.collect.Collections2;
import com.google.common.collect.ForwardingSet;
import com.google.common.collect.Maps;
import org.apache.lucene.analysis.Analyzer;
+import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import java.util.Collection;
@@ -42,8 +43,10 @@ public final class DocumentFieldMappers extends ForwardingSet<FieldMapper<?>> {
private final FieldNameAnalyzer searchAnalyzer;
private final FieldNameAnalyzer searchQuoteAnalyzer;
-public DocumentFieldMappers(DocumentMapper docMapper) {
-this(new FieldMappersLookup(), new FieldNameAnalyzer(docMapper.indexAnalyzer()), new FieldNameAnalyzer(docMapper.searchAnalyzer()), new FieldNameAnalyzer(docMapper.searchQuotedAnalyzer()));
+public DocumentFieldMappers(AnalysisService analysisService) {
+this(new FieldMappersLookup(), new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
+new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
+new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
}
private DocumentFieldMappers(FieldMappersLookup fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {

View File

@@ -166,12 +166,6 @@ public class DocumentMapper implements ToXContent {
private Map<Class<? extends RootMapper>, RootMapper> rootMappers = new LinkedHashMap<>();
-private NamedAnalyzer indexAnalyzer;
-private NamedAnalyzer searchAnalyzer;
-private NamedAnalyzer searchQuoteAnalyzer;
private List<SourceTransform> sourceTransforms;
private final String index;
@@ -228,36 +222,6 @@ public class DocumentMapper implements ToXContent {
return this;
}
-public Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
-this.indexAnalyzer = indexAnalyzer;
-return this;
-}
-public boolean hasIndexAnalyzer() {
-return indexAnalyzer != null;
-}
-public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
-this.searchAnalyzer = searchAnalyzer;
-if (this.searchQuoteAnalyzer == null) {
-this.searchQuoteAnalyzer = searchAnalyzer;
-}
-return this;
-}
-public Builder searchQuoteAnalyzer(NamedAnalyzer searchQuoteAnalyzer) {
-this.searchQuoteAnalyzer = searchQuoteAnalyzer;
-return this;
-}
-public boolean hasSearchAnalyzer() {
-return searchAnalyzer != null;
-}
-public boolean hasSearchQuoteAnalyzer() {
-return searchQuoteAnalyzer != null;
-}
public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map<String, Object> parameters) {
if (sourceTransforms == null) {
sourceTransforms = new ArrayList<>();
@@ -268,8 +232,7 @@ public class DocumentMapper implements ToXContent {
public DocumentMapper build(DocumentMapperParser docMapperParser) {
Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
-return new DocumentMapper(index, indexSettings, docMapperParser, rootObjectMapper, meta,
-indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer, rootMappers, sourceTransforms);
+return new DocumentMapper(index, indexSettings, docMapperParser, rootObjectMapper, meta, rootMappers, sourceTransforms);
}
}
@@ -300,11 +263,6 @@ public class DocumentMapper implements ToXContent {
private final RootMapper[] rootMappersOrdered;
private final RootMapper[] rootMappersNotIncludedInObject;
-private final NamedAnalyzer indexAnalyzer;
-private final NamedAnalyzer searchAnalyzer;
-private final NamedAnalyzer searchQuoteAnalyzer;
private volatile DocumentFieldMappers fieldMappers;
private volatile ImmutableMap<String, ObjectMapper> objectMappers = ImmutableMap.of();
@@ -324,7 +282,6 @@ public class DocumentMapper implements ToXContent {
public DocumentMapper(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser,
RootObjectMapper rootObjectMapper,
ImmutableMap<String, Object> meta,
-NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, NamedAnalyzer searchQuoteAnalyzer,
Map<Class<? extends RootMapper>, RootMapper> rootMappers, List<SourceTransform> sourceTransforms) {
this.index = index;
this.indexSettings = indexSettings;
@@ -345,10 +302,6 @@ public class DocumentMapper implements ToXContent {
}
this.rootMappersNotIncludedInObject = rootMappersNotIncludedInObjectLst.toArray(new RootMapper[rootMappersNotIncludedInObjectLst.size()]);
-this.indexAnalyzer = indexAnalyzer;
-this.searchAnalyzer = searchAnalyzer;
-this.searchQuoteAnalyzer = searchQuoteAnalyzer != null ? searchQuoteAnalyzer : searchAnalyzer;
this.typeFilter = typeMapper().termFilter(type, null);
if (rootMapper(ParentFieldMapper.class).active()) {
@@ -370,7 +323,7 @@ public class DocumentMapper implements ToXContent {
// now traverse and get all the statically defined ones
rootObjectMapper.traverse(fieldMappersAgg);
-this.fieldMappers = new DocumentFieldMappers(this).copyAndAllAll(fieldMappersAgg.mappers);
+this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(fieldMappersAgg.mappers);
final Map<String, ObjectMapper> objectMappers = Maps.newHashMap();
rootObjectMapper.traverse(new ObjectMapperListener() {
@@ -470,18 +423,6 @@ public class DocumentMapper implements ToXContent {
return rootMapper(BoostFieldMapper.class);
}
-public Analyzer indexAnalyzer() {
-return this.indexAnalyzer;
-}
-public Analyzer searchAnalyzer() {
-return this.searchAnalyzer;
-}
-public Analyzer searchQuotedAnalyzer() {
-return this.searchQuoteAnalyzer;
-}
public Filter typeFilter() {
return this.typeFilter;
}
@@ -765,23 +706,6 @@ public class DocumentMapper implements ToXContent {
rootObjectMapper.toXContent(builder, params, new ToXContent() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-if (indexAnalyzer != null && searchAnalyzer != null && indexAnalyzer.name().equals(searchAnalyzer.name()) && !indexAnalyzer.name().startsWith("_")) {
-if (!indexAnalyzer.name().equals("default")) {
-// same analyzers, output it once
-builder.field("analyzer", indexAnalyzer.name());
-}
-} else {
-if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
-if (!indexAnalyzer.name().equals("default")) {
-builder.field("index_analyzer", indexAnalyzer.name());
-}
-}
-if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
-if (!searchAnalyzer.name().equals("default")) {
-builder.field("search_analyzer", searchAnalyzer.name());
-}
-}
-}
if (sourceTransforms != null) {
if (sourceTransforms.size() == 1) {
builder.field("transform");

View File

@@ -79,6 +79,8 @@ import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -244,36 +246,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
-if ("index_analyzer".equals(fieldName)) {
-iterator.remove();
-NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for index_analyzer setting on root type [" + type + "]");
-}
-docBuilder.indexAnalyzer(analyzer);
-} else if ("search_analyzer".equals(fieldName)) {
-iterator.remove();
-NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for search_analyzer setting on root type [" + type + "]");
-}
-docBuilder.searchAnalyzer(analyzer);
-} else if ("search_quote_analyzer".equals(fieldName)) {
-iterator.remove();
-NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for search_analyzer setting on root type [" + type + "]");
-}
-docBuilder.searchQuoteAnalyzer(analyzer);
-} else if ("analyzer".equals(fieldName)) {
-iterator.remove();
-NamedAnalyzer analyzer = analysisService.analyzer(fieldNode.toString());
-if (analyzer == null) {
-throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for analyzer setting on root type [" + type + "]");
-}
-docBuilder.indexAnalyzer(analyzer);
-docBuilder.searchAnalyzer(analyzer);
-} else if ("transform".equals(fieldName)) {
+if ("transform".equals(fieldName)) {
iterator.remove();
if (fieldNode instanceof Map) {
parseTransform(docBuilder, (Map<String, Object>) fieldNode, parserContext.indexVersionCreated());
@@ -307,16 +280,6 @@ public class DocumentMapperParser extends AbstractIndexComponent {
checkNoRemainingFields(mapping, parserContext.indexVersionCreated(), "Root mapping definition has unsupported parameters: ");
-if (!docBuilder.hasIndexAnalyzer()) {
-docBuilder.indexAnalyzer(analysisService.defaultIndexAnalyzer());
-}
-if (!docBuilder.hasSearchAnalyzer()) {
-docBuilder.searchAnalyzer(analysisService.defaultSearchAnalyzer());
-}
-if (!docBuilder.hasSearchQuoteAnalyzer()) {
-docBuilder.searchAnalyzer(analysisService.defaultSearchQuoteAnalyzer());
-}
DocumentMapper documentMapper = docBuilder.build(this);
// update the source with the generated one
documentMapper.refreshSource();

View File

@@ -1040,9 +1040,6 @@ public class MapperService extends AbstractIndexComponent {
return analyzer;
}
}
-if (docMapper != null && docMapper.searchAnalyzer() != null) {
-return docMapper.searchAnalyzer();
-}
return mapperService.searchAnalyzer();
}
@@ -1053,9 +1050,6 @@ public class MapperService extends AbstractIndexComponent {
return analyzer;
}
}
-if (docMapper != null && docMapper.searchQuotedAnalyzer() != null) {
-return docMapper.searchQuotedAnalyzer();
-}
return mapperService.searchQuoteAnalyzer();
}
}

View File

@@ -400,7 +400,7 @@ public class IndexShard extends AbstractIndexShardComponent {
long startTime = System.nanoTime();
Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(source.type());
ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
-return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates, autoGeneratedId);
+return new Engine.Create(docMapper.v1(), docMapper.v1().mappers().indexAnalyzer(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates, autoGeneratedId);
}
public ParsedDocument create(Engine.Create create) throws ElasticsearchException {
@@ -424,7 +424,7 @@ public class IndexShard extends AbstractIndexShardComponent {
long startTime = System.nanoTime();
Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(source.type());
ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
-return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates);
+return new Engine.Index(docMapper.v1(), docMapper.v1().mappers().indexAnalyzer(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates);
}
public ParsedDocument index(Engine.Index index) throws ElasticsearchException {

View File

@@ -101,10 +101,7 @@ import static org.hamcrest.Matchers.*;
public class InternalEngineTests extends ElasticsearchLuceneTestCase {
protected final ShardId shardId = new ShardId(new Index("index"), 1);
-protected final DocumentMapper fakeType = new DocumentMapper.Builder("type",
-ImmutableSettings.settingsBuilder().put("index.version.created", Version.CURRENT).build(),
-new RootObjectMapper.Builder("")).
-indexAnalyzer(Lucene.STANDARD_ANALYZER).build(null);
+protected final Analyzer analyzer = Lucene.STANDARD_ANALYZER;
protected ThreadPool threadPool;
@@ -256,10 +253,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// create a doc and refresh
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.refresh("test", false);
segments = engine.segments(false);
@@ -292,7 +289,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build());
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
-engine.create(new Engine.Create(fakeType, newUid("3"), doc3));
+engine.create(new Engine.Create(null, analyzer, newUid("3"), doc3));
engine.refresh("test", false);
segments = engine.segments(false);
@@ -339,7 +336,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engineSettingsService.refreshSettings(ImmutableSettings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true).build());
ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
-engine.create(new Engine.Create(fakeType, newUid("4"), doc4));
+engine.create(new Engine.Create(null, analyzer, newUid("4"), doc4));
engine.refresh("test", false);
segments = engine.segments(false);
@@ -370,7 +367,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
assertThat(segments.isEmpty(), equalTo(true));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.refresh("test", false);
segments = engine.segments(true);
@@ -378,10 +375,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
assertThat(segments.get(0).ramTree, notNullValue());
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.refresh("test", false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
-engine.create(new Engine.Create(fakeType, newUid("3"), doc3));
+engine.create(new Engine.Create(null, analyzer, newUid("3"), doc3));
engine.refresh("test", false);
segments = engine.segments(true);
@@ -421,11 +418,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
final Engine engine = createEngine(engineSettingsService, store, createTranslog(), mergeSchedulerProvider);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
assertThat(engine.segments(false).size(), equalTo(1));
-index = new Engine.Index(fakeType, newUid("2"), doc);
+index = new Engine.Index(null, analyzer, newUid("2"), doc);
engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
List<Segment> segments = engine.segments(false);
@@ -433,7 +430,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
for (Segment segment : segments) {
assertThat(segment.getMergeId(), nullValue());
}
-index = new Engine.Index(fakeType, newUid("3"), doc);
+index = new Engine.Index(null, analyzer, newUid("3"), doc);
engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
segments = engine.segments(false);
@@ -453,7 +450,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
waitForMerge.get().countDown();
-index = new Engine.Index(fakeType, newUid("4"), doc);
+index = new Engine.Index(null, analyzer, newUid("4"), doc);
engine.index(index);
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration();
@@ -503,7 +500,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
// its not there...
searchResult = engine.acquireSearcher("test");
@@ -542,7 +539,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
-engine.index(new Engine.Index(fakeType, newUid("1"), doc));
+engine.index(new Engine.Index(null, analyzer, newUid("1"), doc));
// its not updated yet...
searchResult = engine.acquireSearcher("test");
@@ -595,7 +592,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
// its not there...
searchResult = engine.acquireSearcher("test");
@@ -629,7 +626,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
-engine.index(new Engine.Index(fakeType, newUid("1"), doc));
+engine.index(new Engine.Index(null, analyzer, newUid("1"), doc));
// its not updated yet...
searchResult = engine.acquireSearcher("test");
@@ -658,7 +655,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// create a document
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
// its not there...
searchResult = engine.acquireSearcher("test");
@@ -691,7 +688,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testFailEngineOnCorruption() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
final boolean failEngine = defaultSettings.getAsBoolean(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, false);
final int failInPhase = randomIntBetween(1, 3);
@@ -729,7 +726,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
searchResult.close();
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.refresh("foo", false);
searchResult = engine.acquireSearcher("test");
@@ -746,7 +743,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testSimpleRecover() throws Exception {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
engine.recover(new Engine.RecoveryHandler() {
@@ -791,10 +788,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc1));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc1));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.recover(new Engine.RecoveryHandler() {
@Override
@@ -822,10 +819,10 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc1));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc1));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc2));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc2));
engine.recover(new Engine.RecoveryHandler() {
@Override
@@ -841,7 +838,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// add for phase3
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
-engine.create(new Engine.Create(fakeType, newUid("3"), doc3));
+engine.create(new Engine.Create(null, analyzer, newUid("3"), doc3));
}
@Override
@@ -860,11 +857,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc);
+Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc);
engine.create(create);
assertThat(create.version(), equalTo(1l));
-create = new Engine.Create(fakeType, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+create = new Engine.Create(null, analyzer, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.create(create);
assertThat(create.version(), equalTo(1l));
}
@@ -872,11 +869,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testExternalVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
+Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(12l));
-create = new Engine.Create(fakeType, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+create = new Engine.Create(null, analyzer, newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.create(create);
assertThat(create.version(), equalTo(12l));
}
@@ -884,11 +881,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
-index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(1l));
}
@@ -896,11 +893,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testExternalVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
-index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(12l));
}
@@ -908,15 +905,15 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
-index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -925,7 +922,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
}
// future versions should not work as well
-index = new Engine.Index(fakeType, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -937,15 +934,15 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testExternalVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
-index = new Engine.Index(fakeType, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(14l));
-index = new Engine.Index(fakeType, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -957,17 +954,17 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -976,7 +973,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
}
// future versions should not work as well
-index = new Engine.Index(fakeType, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -988,17 +985,17 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testExternalVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
-index = new Engine.Index(fakeType, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(14l));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-index = new Engine.Index(fakeType, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -1010,11 +1007,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningDeleteConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
@@ -1041,7 +1038,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
assertThat(delete.version(), equalTo(3l));
// now check if we can index to a delete doc with version
-index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -1050,7 +1047,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
}
// we shouldn't be able to create as well
-Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
} catch (VersionConflictEngineException e) {
@@ -1061,11 +1058,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningDeleteConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
@@ -1098,7 +1095,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
// now check if we can index to a delete doc with version
-index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
@@ -1107,7 +1104,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
}
// we shouldn't be able to create as well
-Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
+Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
} catch (VersionConflictEngineException e) {
@@ -1118,11 +1115,11 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningCreateExistsException() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(1l));
-create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
fail();
@@ -1134,13 +1131,13 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningCreateExistsExceptionWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Create create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+Engine.Create create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(1l));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-create = new Engine.Create(fakeType, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
+create = new Engine.Create(null, analyzer, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
fail();
@@ -1152,21 +1149,21 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningReplicaConflict1() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
// apply the second index to the replica, should work fine
-index = new Engine.Index(fakeType, newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(2l));
// now, the old one should not work
-index = new Engine.Index(fakeType, newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
try {
replicaEngine.index(index);
fail();
@@ -1176,7 +1173,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// second version on replica should fail as well
try {
-index = new Engine.Index(fakeType, newUid("1"), doc, 2l
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(2l));
@@ -1188,18 +1185,18 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testVersioningReplicaConflict2() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
// apply the first index to the replica, should work fine
-index = new Engine.Index(fakeType, newUid("1"), doc, 1l
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 1l
, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(1l));
// index it again
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
@@ -1226,7 +1223,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// now do the second index on the replica, it should fail
try {
-index = new Engine.Index(fakeType, newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
+index = new Engine.Index(null, analyzer, newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
fail("excepted VersionConflictEngineException to be thrown");
} catch (VersionConflictEngineException e) {
@@ -1238,17 +1235,17 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testBasicCreatedFlag() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertTrue(index.created());
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertFalse(index.created());
engine.delete(new Engine.Delete(null, "1", newUid("1")));
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertTrue(index.created());
}
@@ -1256,7 +1253,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testCreatedFlagAfterFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
-Engine.Index index = new Engine.Index(fakeType, newUid("1"), doc);
+Engine.Index index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertTrue(index.created());
@@ -1264,7 +1261,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
-index = new Engine.Index(fakeType, newUid("1"), doc);
+index = new Engine.Index(null, analyzer, newUid("1"), doc);
engine.index(index);
assertTrue(index.created());
}
@@ -1312,13 +1309,13 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
assertFalse(mockAppender.sawIndexWriterMessage);
// Again, with TRACE, which should log IndexWriter output:
rootLogger.setLevel(Level.TRACE);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
assertTrue(mockAppender.sawIndexWriterMessage);
@@ -1347,14 +1344,14 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
-engine.create(new Engine.Create(fakeType, newUid("1"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("1"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
assertFalse(mockAppender.sawIndexWriterMessage);
assertFalse(mockAppender.sawIndexWriterIFDMessage);
// Again, with TRACE, which should only log IndexWriter IFD output:
iwIFDLogger.setLevel(Level.TRACE);
-engine.create(new Engine.Create(fakeType, newUid("2"), doc));
+engine.create(new Engine.Create(null, analyzer, newUid("2"), doc));
engine.flush(Engine.FlushType.COMMIT_TRANSLOG, false, false);
assertFalse(mockAppender.sawIndexWriterMessage);
assertTrue(mockAppender.sawIndexWriterIFDMessage);
@@ -1384,7 +1381,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
document.add(new TextField("value", "test1", Field.Store.YES));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
-engine.index(new Engine.Index(fakeType, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
+engine.index(new Engine.Index(null, analyzer, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
// Delete document we just added:
engine.delete(new Engine.Delete("test", "1", newUid("1"), 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
@@ -1409,7 +1406,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// Try to index uid=1 with a too-old version, should fail:
try {
-engine.index(new Engine.Index(fakeType, newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
+engine.index(new Engine.Index(null, analyzer, newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
fail("did not hit expected exception");
} catch (VersionConflictEngineException vcee) {
// expected
@@ -1421,7 +1418,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// Try to index uid=2 with a too-old version, should fail:
try {
-engine.index(new Engine.Index(fakeType, newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
+engine.index(new Engine.Index(null, analyzer, newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
fail("did not hit expected exception");
} catch (VersionConflictEngineException vcee) {
// expected

View File

@@ -419,9 +419,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
rootTypes.put(SourceFieldMapper.NAME, "{\"enabled\" : true}");
rootTypes.put(TypeFieldMapper.NAME, "{\"store\" : true}");
rootTypes.put("include_in_all", "true");
-rootTypes.put("index_analyzer", "\"standard\"");
-rootTypes.put("search_analyzer", "\"standard\"");
-rootTypes.put("analyzer", "\"standard\"");
rootTypes.put("dynamic_date_formats", "[\"yyyy-MM-dd\", \"dd-MM-yyyy\"]");
rootTypes.put("numeric_detection", "true");
rootTypes.put("dynamic_templates", "[]");

View File

@@ -148,10 +148,10 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
}
private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException {
-assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.indexAnalyzer(), null), notNullValue());
-assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
-TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.indexAnalyzer(), null);
+TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null);
tokenStream.reset();
NumericTermAttribute nta = tokenStream.addAttribute(NumericTermAttribute.class);
List<Long> values = new ArrayList<>();
@@ -159,7 +159,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
values.add(nta.getRawValue());
}
-tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.indexAnalyzer(), null);
+tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null);
tokenStream.reset();
nta = tokenStream.addAttribute(NumericTermAttribute.class);
int pos = 0;
@@ -184,7 +184,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.bytes());
-assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue());
}
@Test

View File

@@ -58,8 +58,8 @@ public class DoubleIndexingDocTest extends ElasticsearchSingleNodeLuceneTestCase
.endObject()
.bytes());
-writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
-writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
+writer.addDocument(doc.rootDoc(), mapper.mappers().indexAnalyzer());
+writer.addDocument(doc.rootDoc(), mapper.mappers().indexAnalyzer());
IndexReader reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);

View File

@@ -73,7 +73,7 @@ public class StoredNumericValuesTest extends ElasticsearchSingleNodeTest {
.endObject()
.bytes());
-writer.addDocument(doc.rootDoc(), mapper.indexAnalyzer());
+writer.addDocument(doc.rootDoc(), mapper.mappers().indexAnalyzer());
// Indexing a doc in the old way
FieldType fieldType = new FieldType();

View File

@@ -47,7 +47,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(false));
-assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
}
@Test
@@ -65,7 +65,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
ParsedDocument doc = docMapper.parse(SourceToParse.source(source).type("type").id("1"));
assertThat(doc.rootDoc().getField("_size").fieldType().stored(), equalTo(true));
-assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField("_size").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
}
@Test

View File

@@ -85,7 +85,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
assertThat(doc.rootDoc().getField("_timestamp").fieldType().stored(), equalTo(true));
assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_timestamp").fieldType().indexOptions());
-assertThat(doc.rootDoc().getField("_timestamp").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField("_timestamp").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
}
@Test

View File

@@ -69,7 +69,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
assertThat(doc.rootDoc().getField("_ttl").fieldType().stored(), equalTo(true));
assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_ttl").fieldType().indexOptions());
-assertThat(doc.rootDoc().getField("_ttl").tokenStream(docMapper.indexAnalyzer(), null), notNullValue());
+assertThat(doc.rootDoc().getField("_ttl").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue());
}
@Test