Remove MapperAnalyzer
parent 33339ab288
commit 542c25e78d
@@ -52,20 +52,15 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
         return defaultAnalyzer;
     }
 
-    /** NOTE: public so MapperAnalyzer can invoke: */
     @Override
-    public Analyzer getWrappedAnalyzer(String fieldName) {
-        return getAnalyzer(fieldName);
-    }
-
-    private Analyzer getAnalyzer(String name) {
-        Analyzer analyzer = analyzers.get(name);
+    protected Analyzer getWrappedAnalyzer(String fieldName) {
+        Analyzer analyzer = analyzers.get(fieldName);
         if (analyzer != null) {
             return analyzer;
         }
         // Don't be lenient here and return the default analyzer
         // Fields need to be explicitly added
-        throw new IllegalArgumentException("Field [" + name + "] has no associated analyzer");
+        throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer");
     }
 
     /**
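Note: after this hunk, FieldNameAnalyzer's getWrappedAnalyzer does the map lookup itself and refuses to fall back for unknown fields. A compilable, self-contained sketch of that shape (the class name below is illustrative, not the full Elasticsearch source):

import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;

// Illustrative sketch of strict per-field analyzer dispatch, mirroring the hunk above.
public final class StrictPerFieldAnalyzer extends DelegatingAnalyzerWrapper {

    private final Map<String, Analyzer> analyzers;

    public StrictPerFieldAnalyzer(Map<String, Analyzer> analyzers) {
        super(Analyzer.PER_FIELD_REUSE_STRATEGY);
        this.analyzers = analyzers;
    }

    @Override
    protected Analyzer getWrappedAnalyzer(String fieldName) {
        Analyzer analyzer = analyzers.get(fieldName);
        if (analyzer != null) {
            return analyzer;
        }
        // Unknown fields fail loudly instead of silently falling back to a default.
        throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer");
    }
}

The method also drops from public to protected: with MapperAnalyzer gone (next hunk), nothing outside the wrapper needs to call it directly.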
@@ -1,50 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.mapper;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
-import org.elasticsearch.index.analysis.FieldNameAnalyzer;
-
-/** Hacky analyzer to dispatch per-thread based on the type of the current document being indexed, to look up the per-field Analyzer. Once
- *  mappings are moved to the index level we can remove this. */
-public class MapperAnalyzer extends DelegatingAnalyzerWrapper {
-
-    private final MapperService mapperService;
-
-    /** Which type this thread is currently indexing. */
-    private final ThreadLocal<String> threadTypes = new ThreadLocal<>();
-
-    public MapperAnalyzer(MapperService mapperService) {
-        super(Analyzer.PER_FIELD_REUSE_STRATEGY);
-        this.mapperService = mapperService;
-    }
-
-    /** Any thread that is about to use this analyzer for indexing must first set the type by calling this. */
-    public void setType(String type) {
-        threadTypes.set(type);
-    }
-
-    @Override
-    protected Analyzer getWrappedAnalyzer(String fieldName) {
-        // First get the FieldNameAnalyzer from the type, then ask it for the right analyzer for this field, or the default index analyzer:
-        return ((FieldNameAnalyzer) mapperService.documentMapper(threadTypes.get()).mappers().indexAnalyzer()).getWrappedAnalyzer(fieldName);
-    }
-}
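The deleted class dispatched per thread: every indexing thread had to call setType(type) before using the analyzer, and getWrappedAnalyzer then looked the field up through that type's mappings. A minimal self-contained sketch of that contract, with illustrative names and a plain map standing in for the mapping lookup:

import java.util.Map;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;

// Illustrative sketch of the per-thread, per-type dispatch this commit removes.
public class ThreadLocalTypeAnalyzer extends DelegatingAnalyzerWrapper {

    // type -> (field -> analyzer); stands in for looking up the type's mappings.
    private final Map<String, Map<String, Analyzer>> analyzersByType;

    // Which mapping type the current thread is indexing.
    private final ThreadLocal<String> threadTypes = new ThreadLocal<>();

    public ThreadLocalTypeAnalyzer(Map<String, Map<String, Analyzer>> analyzersByType) {
        super(Analyzer.PER_FIELD_REUSE_STRATEGY);
        this.analyzersByType = analyzersByType;
    }

    /** Every indexing thread must call this first, or the lookup below fails. */
    public void setType(String type) {
        threadTypes.set(type);
    }

    @Override
    protected Analyzer getWrappedAnalyzer(String fieldName) {
        return analyzersByType.get(threadTypes.get()).get(fieldName);
    }
}

Because correctness depends on every caller remembering setType() (see the calls removed from IndexShard and TranslogRecoveryPerformer below), replacing it with a stateless wrapper owned by MapperService removes that ordering hazard.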
@@ -72,6 +72,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Function;
 
 import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 
@@ -85,6 +86,23 @@ public class MapperService extends AbstractIndexComponent {
             "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
             "_size", "_timestamp", "_ttl"
     );
 
+    private static final Function<MappedFieldType, Analyzer> INDEX_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
+        public Analyzer apply(MappedFieldType fieldType) {
+            return fieldType.indexAnalyzer();
+        }
+    };
+    private static final Function<MappedFieldType, Analyzer> SEARCH_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
+        public Analyzer apply(MappedFieldType fieldType) {
+            return fieldType.searchAnalyzer();
+        }
+    };
+    private static final Function<MappedFieldType, Analyzer> SEARCH_QUOTE_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
+        public Analyzer apply(MappedFieldType fieldType) {
+            return fieldType.searchQuoteAnalyzer();
+        }
+    };
+
     private final AnalysisService analysisService;
     private final IndexFieldDataService fieldDataService;
 
@@ -110,8 +128,9 @@ public class MapperService extends AbstractIndexComponent {
 
     private final DocumentMapperParser documentParser;
 
-    private final SmartIndexNameSearchAnalyzer searchAnalyzer;
-    private final SmartIndexNameSearchQuoteAnalyzer searchQuoteAnalyzer;
+    private final MapperAnalyzerWrapper indexAnalyzer;
+    private final MapperAnalyzerWrapper searchAnalyzer;
+    private final MapperAnalyzerWrapper searchQuoteAnalyzer;
 
     private final List<DocumentTypeListener> typeListeners = new CopyOnWriteArrayList<>();
 
@@ -128,8 +147,9 @@ public class MapperService extends AbstractIndexComponent {
         this.fieldDataService = fieldDataService;
         this.fieldTypes = new FieldTypeLookup();
         this.documentParser = new DocumentMapperParser(index, indexSettings, this, analysisService, similarityLookupService, scriptService);
-        this.searchAnalyzer = new SmartIndexNameSearchAnalyzer(analysisService.defaultSearchAnalyzer());
-        this.searchQuoteAnalyzer = new SmartIndexNameSearchQuoteAnalyzer(analysisService.defaultSearchQuoteAnalyzer());
+        this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), INDEX_ANALYZER_EXTRACTOR);
+        this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), SEARCH_ANALYZER_EXTRACTOR);
+        this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), SEARCH_QUOTE_ANALYZER_EXTRACTOR);
 
         this.dynamic = indexSettings.getAsBoolean("index.mapper.dynamic", true);
         defaultPercolatorMappingSource = "{\n" +
@@ -558,6 +578,10 @@ public class MapperService extends AbstractIndexComponent {
         return fieldType;
     }
 
+    public Analyzer indexAnalyzer() {
+        return this.indexAnalyzer;
+    }
+
     public Analyzer searchAnalyzer() {
         return this.searchAnalyzer;
     }
@@ -604,39 +628,26 @@ public class MapperService extends AbstractIndexComponent {
         return META_FIELDS.contains(fieldName);
     }
 
-    final class SmartIndexNameSearchAnalyzer extends DelegatingAnalyzerWrapper {
+    /** An analyzer wrapper that can lookup fields within the index mappings */
+    final class MapperAnalyzerWrapper extends DelegatingAnalyzerWrapper {
 
         private final Analyzer defaultAnalyzer;
+        private final Function<MappedFieldType, Analyzer> extractAnalyzer;
 
-        SmartIndexNameSearchAnalyzer(Analyzer defaultAnalyzer) {
+        MapperAnalyzerWrapper(Analyzer defaultAnalyzer, Function<MappedFieldType, Analyzer> extractAnalyzer) {
             super(Analyzer.PER_FIELD_REUSE_STRATEGY);
             this.defaultAnalyzer = defaultAnalyzer;
+            this.extractAnalyzer = extractAnalyzer;
         }
 
         @Override
         protected Analyzer getWrappedAnalyzer(String fieldName) {
             MappedFieldType fieldType = smartNameFieldType(fieldName);
-            if (fieldType != null && fieldType.searchAnalyzer() != null) {
-                return fieldType.searchAnalyzer();
-            }
-            return defaultAnalyzer;
-        }
-    }
-
-    final class SmartIndexNameSearchQuoteAnalyzer extends DelegatingAnalyzerWrapper {
-
-        private final Analyzer defaultAnalyzer;
-
-        SmartIndexNameSearchQuoteAnalyzer(Analyzer defaultAnalyzer) {
-            super(Analyzer.PER_FIELD_REUSE_STRATEGY);
-            this.defaultAnalyzer = defaultAnalyzer;
-        }
-
-        @Override
-        protected Analyzer getWrappedAnalyzer(String fieldName) {
-            MappedFieldType fieldType = smartNameFieldType(fieldName);
-            if (fieldType != null && fieldType.searchQuoteAnalyzer() != null) {
-                return fieldType.searchQuoteAnalyzer();
-            }
+            if (fieldType != null) {
+                Analyzer analyzer = extractAnalyzer.apply(fieldType);
+                if (analyzer != null) {
+                    return analyzer;
+                }
+            }
             return defaultAnalyzer;
         }
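The two near-duplicate Smart* classes collapse into one MapperAnalyzerWrapper parameterized by an extractor function. A self-contained sketch of the resolution order it implements, where FieldMapping is a hypothetical stand-in for MappedFieldType and a plain map stands in for smartNameFieldType():

import java.util.Map;
import java.util.function.Function;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;

// Illustrative sketch of default-plus-extractor resolution; not the ES class itself.
public class ExtractingAnalyzerWrapper extends DelegatingAnalyzerWrapper {

    /** Hypothetical stand-in for MappedFieldType. */
    public interface FieldMapping {
        Analyzer indexAnalyzer();
        Analyzer searchAnalyzer();
    }

    private final Map<String, FieldMapping> mappings;   // stands in for smartNameFieldType()
    private final Analyzer defaultAnalyzer;
    private final Function<FieldMapping, Analyzer> extractAnalyzer;

    public ExtractingAnalyzerWrapper(Map<String, FieldMapping> mappings,
                                     Analyzer defaultAnalyzer,
                                     Function<FieldMapping, Analyzer> extractAnalyzer) {
        super(Analyzer.PER_FIELD_REUSE_STRATEGY);
        this.mappings = mappings;
        this.defaultAnalyzer = defaultAnalyzer;
        this.extractAnalyzer = extractAnalyzer;
    }

    @Override
    protected Analyzer getWrappedAnalyzer(String fieldName) {
        FieldMapping mapping = mappings.get(fieldName);
        if (mapping != null) {
            Analyzer analyzer = extractAnalyzer.apply(mapping);
            if (analyzer != null) {
                return analyzer;   // per-field analyzer taken from the mapping
            }
        }
        return defaultAnalyzer;    // nothing mapped for this field
    }
}

On Java 8 the three extractor constants in the earlier hunk could presumably be written as method references such as MappedFieldType::indexAnalyzer; the anonymous classes express the same lookup.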
@@ -177,8 +177,6 @@ public class IndexShard extends AbstractIndexShardComponent {
     private final MeanMetric flushMetric = new MeanMetric();
 
     private final ShardEngineFailListener failedEngineListener = new ShardEngineFailListener();
-
-    private final MapperAnalyzer mapperAnalyzer;
     private volatile boolean flushOnClose = true;
 
     /**
@@ -235,7 +233,6 @@ public class IndexShard extends AbstractIndexShardComponent {
         this.flushOnClose = indexSettings.getAsBoolean(INDEX_FLUSH_ON_CLOSE, true);
         this.nodeEnv = nodeEnv;
         indexSettingsService.addListener(applyRefreshSettings);
-        this.mapperAnalyzer = new MapperAnalyzer(mapperService);
         this.path = path;
         this.mergePolicyConfig = new MergePolicyConfig(logger, indexSettings);
         /* create engine config */
@@ -466,7 +463,6 @@ public class IndexShard extends AbstractIndexShardComponent {
     public void create(Engine.Create create) {
         writeAllowed(create.origin());
         create = indexingService.preCreate(create);
-        mapperAnalyzer.setType(create.type());
         try {
             if (logger.isTraceEnabled()) {
                 logger.trace("index [{}][{}]{}", create.type(), create.id(), create.docs());
@@ -505,7 +501,6 @@ public class IndexShard extends AbstractIndexShardComponent {
     public boolean index(Engine.Index index) {
         writeAllowed(index.origin());
         index = indexingService.preIndex(index);
-        mapperAnalyzer.setType(index.type());
         final boolean created;
         try {
             if (logger.isTraceEnabled()) {
@@ -1337,7 +1332,7 @@ public class IndexShard extends AbstractIndexShardComponent {
     }
 
     private final EngineConfig newEngineConfig(TranslogConfig translogConfig) {
-        final TranslogRecoveryPerformer translogRecoveryPerformer = new TranslogRecoveryPerformer(shardId, mapperService, mapperAnalyzer, queryParserService, indexAliasesService, indexCache) {
+        final TranslogRecoveryPerformer translogRecoveryPerformer = new TranslogRecoveryPerformer(shardId, mapperService, queryParserService, indexAliasesService, indexCache) {
            @Override
            protected void operationProcessed() {
                assert recoveryState != null;
@@ -1346,7 +1341,7 @@ public class IndexShard extends AbstractIndexShardComponent {
         };
         return new EngineConfig(shardId,
                 threadPool, indexingService, indexSettingsService, warmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig,
-                mapperAnalyzer, similarityService.similarity(), codecService, failedEngineListener, translogRecoveryPerformer, indexCache.filter(), indexCache.filterPolicy(), translogConfig);
+                mapperService.indexAnalyzer(), similarityService.similarity(), codecService, failedEngineListener, translogRecoveryPerformer, indexCache.filter(), indexCache.filterPolicy(), translogConfig);
     }
 
     private static class IndexShardOperationCounter extends AbstractRefCounted {
@@ -53,17 +53,15 @@ public class TranslogRecoveryPerformer {
     private final IndexQueryParserService queryParserService;
     private final IndexAliasesService indexAliasesService;
    private final IndexCache indexCache;
-    private final MapperAnalyzer mapperAnalyzer;
     private final Map<String, Mapping> recoveredTypes = new HashMap<>();
     private final ShardId shardId;
 
-    protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, MapperAnalyzer mapperAnalyzer, IndexQueryParserService queryParserService, IndexAliasesService indexAliasesService, IndexCache indexCache) {
+    protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, IndexQueryParserService queryParserService, IndexAliasesService indexAliasesService, IndexCache indexCache) {
         this.shardId = shardId;
         this.mapperService = mapperService;
         this.queryParserService = queryParserService;
         this.indexAliasesService = indexAliasesService;
         this.indexCache = indexCache;
-        this.mapperAnalyzer = mapperAnalyzer;
     }
 
     protected Tuple<DocumentMapper, Mapping> docMapper(String type) {
@@ -136,7 +134,6 @@ public class TranslogRecoveryPerformer {
                         source(create.source()).type(create.type()).id(create.id())
                                 .routing(create.routing()).parent(create.parent()).timestamp(create.timestamp()).ttl(create.ttl()),
                         create.version(), create.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, true, false);
-                mapperAnalyzer.setType(create.type()); // this is a PITA - once mappings are per index not per type this can go away an we can just simply move this to the engine eventually :)
                 maybeAddMappingUpdate(engineCreate.type(), engineCreate.parsedDoc().dynamicMappingsUpdate(), engineCreate.id(), allowMappingUpdates);
                 engine.create(engineCreate);
                 break;
@@ -145,7 +142,6 @@ public class TranslogRecoveryPerformer {
                 Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), source(index.source()).type(index.type()).id(index.id())
                                 .routing(index.routing()).parent(index.parent()).timestamp(index.timestamp()).ttl(index.ttl()),
                         index.version(), index.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, true);
-                mapperAnalyzer.setType(index.type());
                 maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates);
                 engine.index(engineIndex);
                 break;
@@ -1825,7 +1825,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
         public final AtomicInteger recoveredOps = new AtomicInteger(0);
 
         public TranslogHandler(String indexName) {
-            super(new ShardId("test", 0), null, new MapperAnalyzer(null), null, null, null);
+            super(new ShardId("test", 0), null, null, null, null);
             Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
             RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
             Index index = new Index(indexName);
@@ -45,12 +45,8 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
-/**
- *
- */
 public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
 
-    @Test
     public void test1Merge() throws Exception {
 
         String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
@@ -79,7 +75,6 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
     }
 
-    @Test
     public void testMergeObjectDynamic() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string();
@@ -95,7 +90,6 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
     }
 
-    @Test
     public void testMergeObjectAndNested() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
@@ -117,7 +111,6 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
     }
 
-    @Test
     public void testMergeSearchAnalyzer() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -137,7 +130,6 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
     }
 
-    @Test
     public void testChangeSearchAnalyzerToDefault() throws Exception {
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -165,7 +157,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
 
             DocumentFieldMappers dfm = documentMapper.mappers();
             try {
-                ((FieldNameAnalyzer) dfm.indexAnalyzer()).getWrappedAnalyzer("non_existing_field");
+                assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
                 fail();
             } catch (IllegalArgumentException e) {
                 // ok that's expected
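The test no longer casts to FieldNameAnalyzer; it drives the public Analyzer API instead and still expects IllegalArgumentException, presumably because tokenStream() resolves its delegate through getWrappedAnalyzer(). A self-contained sketch of that probe, with illustrative names and plain assertions in place of the JUnit helpers:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;

// Sketch: exercising a per-field analyzer wrapper through the public Analyzer API only.
public final class WrapperProbe {

    // `wrapper` is assumed to be a DelegatingAnalyzerWrapper such as FieldNameAnalyzer.
    public static void assertUnknownFieldRejected(Analyzer wrapper) throws Exception {
        try (TokenStream ts = wrapper.tokenStream("non_existing_field", "foo")) {
            // Reaching this point means the wrapper silently fell back to some analyzer.
            throw new AssertionError("expected IllegalArgumentException for unmapped field");
        } catch (IllegalArgumentException expected) {
            // ok: resolving the delegate for the unmapped field threw, as the test requires
        }
    }
}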
@@ -207,7 +199,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
                     continue;
                 }
                 dfm = documentMapper.mappers();
-                ((FieldNameAnalyzer) dfm.indexAnalyzer()).getWrappedAnalyzer(fieldName);
+                assertNotNull(dfm.indexAnalyzer().tokenStream(fieldName, "foo"));
             }
         } finally {
             stopped.set(true);