Remove deprecated smartcn_word and smartcn_sentence

These were deprecated in 2.2.0 by #22. Closes #24. (cherry picked from commit 2bab6e0)

parent: 26c1f4c43e
commit: f4d0d27903
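Migration note: an index that built a custom analyzer from the removed "smartcn_sentence" tokenizer plus "smartcn_word" token filter can switch to the single "smartcn_tokenizer" tokenizer, or simply use the prebuilt "smartcn" analyzer, which this commit keeps. A minimal sketch, assuming the ES 1.x-era ImmutableSettings builder and a hypothetical analyzer name "my_smartcn":

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class SmartcnMigrationSketch {
    public static void main(String[] args) {
        // Before this commit a custom analyzer might have combined the deprecated pair:
        //   tokenizer = "smartcn_sentence", filter = ["smartcn_word"]
        // After this commit only the merged tokenizer name remains:
        Settings analysis = ImmutableSettings.settingsBuilder()
                .put("index.analysis.analyzer.my_smartcn.type", "custom")
                .put("index.analysis.analyzer.my_smartcn.tokenizer", "smartcn_tokenizer")
                .build();
        // These settings would normally go into a create-index request;
        // printed here only for illustration.
        System.out.println(analysis.getAsMap());
    }
}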
SmartChineseAnalysisBinderProcessor.java
@@ -30,14 +30,6 @@ public class SmartChineseAnalysisBinderProcessor extends AnalysisModule.AnalysisBinderProcessor {
 
     @Override
     public void processTokenizers(TokenizersBindings tokenizersBindings) {
-        // TODO Remove it in 2.3.0 (was deprecated: see https://github.com/elasticsearch/elasticsearch-analysis-smartcn/issues/22)
-        tokenizersBindings.processTokenizer("smartcn_sentence", SmartChineseSentenceTokenizerFactory.class);
         tokenizersBindings.processTokenizer("smartcn_tokenizer", SmartChineseTokenizerTokenizerFactory.class);
     }
-
-    @Override
-    public void processTokenFilters(TokenFiltersBindings tokenFiltersBindings) {
-        // TODO Remove it in 2.3.0 (was deprecated: see https://github.com/elasticsearch/elasticsearch-analysis-smartcn/issues/22)
-        tokenFiltersBindings.processTokenFilter("smartcn_word", SmartChineseWordTokenFilterFactory.class);
-    }
 }
SmartChineseSentenceTokenizerFactory.java (deleted)
@@ -1,47 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.analysis.cn.smart.SentenceTokenizer;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.assistedinject.Assisted;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.settings.IndexSettings;
-
-import java.io.Reader;
-
-/**
- * SentenceTokenizer has been deprecated in Lucene 4.8
- */
-@Deprecated
-public class SmartChineseSentenceTokenizerFactory extends AbstractTokenizerFactory {
-
-    @Inject
-    public SmartChineseSentenceTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
-        super(index, indexSettings, name, settings);
-    }
-
-    @Override
-    public Tokenizer create(Reader reader) {
-        return new SentenceTokenizer(reader);
-    }
-}
SmartChineseWordTokenFilterFactory.java (deleted)
@@ -1,45 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.cn.smart.WordTokenFilter;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.assistedinject.Assisted;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.settings.IndexSettings;
-
-/**
- * WordTokenFilter has been deprecated in Lucene 4.8
- */
-@Deprecated
-public class SmartChineseWordTokenFilterFactory extends AbstractTokenFilterFactory {
-
-    @Inject
-    public SmartChineseWordTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
-        super(index, indexSettings, name, settings);
-    }
-
-    @Override
-    public TokenStream create(TokenStream tokenStream) {
-        return new WordTokenFilter(tokenStream);
-    }
-}
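The two deleted factories above only wrapped Lucene's deprecated SentenceTokenizer and WordTokenFilter. A rough Lucene-level sketch of the replacement, assuming Lucene 4.8's HMMChineseTokenizer(Reader) constructor (only the SentenceTokenizer and WordTokenFilter constructors are confirmed by the diff); it contrasts the old two-step chain with the single tokenizer that the kept smartcn_tokenizer registration appears to be built on (its import shows up in SmartChineseIndicesAnalysis below):

import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.cn.smart.HMMChineseTokenizer;
import org.apache.lucene.analysis.cn.smart.SentenceTokenizer;
import org.apache.lucene.analysis.cn.smart.WordTokenFilter;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class SmartcnTokenizerComparison {
    public static void main(String[] args) throws Exception {
        String text = "我们是中国人";

        // Old, deprecated chain: sentence splitting followed by HMM word segmentation
        // (what the removed smartcn_sentence tokenizer + smartcn_word filter exposed).
        Tokenizer sentences = new SentenceTokenizer(new StringReader(text));
        printTokens(new WordTokenFilter(sentences));

        // Replacement: a single tokenizer doing both steps
        // (assumes the Lucene 4.8 HMMChineseTokenizer(Reader) constructor).
        printTokens(new HMMChineseTokenizer(new StringReader(text)));
    }

    private static void printTokens(TokenStream stream) throws Exception {
        CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
        stream.reset();
        while (stream.incrementToken()) {
            System.out.println(term.toString());
        }
        stream.end();
        stream.close();
    }
}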
SmartChineseIndicesAnalysis.java
@@ -19,17 +19,17 @@
 
 package org.elasticsearch.indices.analysis.smartcn;
 
-import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.cn.smart.HMMChineseTokenizer;
-import org.apache.lucene.analysis.cn.smart.SentenceTokenizer;
 import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
-import org.apache.lucene.analysis.cn.smart.WordTokenFilter;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.analysis.*;
+import org.elasticsearch.index.analysis.AnalyzerScope;
+import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
+import org.elasticsearch.index.analysis.PreBuiltTokenizerFactoryFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;
 import org.elasticsearch.indices.analysis.IndicesAnalysisService;
 
 import java.io.Reader;
@@ -47,32 +47,7 @@ public class SmartChineseIndicesAnalysis extends AbstractComponent {
         // Register smartcn analyzer
         indicesAnalysisService.analyzerProviderFactories().put("smartcn", new PreBuiltAnalyzerProviderFactory("smartcn", AnalyzerScope.INDICES, new SmartChineseAnalyzer(Lucene.ANALYZER_VERSION)));
 
-        // Register smartcn_word token filter
-        // TODO Remove it in 2.3.0 (was deprecated: see https://github.com/elasticsearch/elasticsearch-analysis-smartcn/issues/22)
-        indicesAnalysisService.tokenFilterFactories().put("smartcn_word", new PreBuiltTokenFilterFactoryFactory(new TokenFilterFactory() {
-            @Override public String name() {
-                return "smartcn_word";
-            }
-
-            @Override public TokenStream create(TokenStream tokenStream) {
-                return new WordTokenFilter(tokenStream);
-            }
-        }));
-
-        // Register smartcn_sentence tokenizer
-        indicesAnalysisService.tokenizerFactories().put("smartcn_sentence", new PreBuiltTokenizerFactoryFactory(new TokenizerFactory() {
-            @Override
-            public String name() {
-                return "smartcn_sentence";
-            }
-
-            @Override
-            public Tokenizer create(Reader reader) {
-                return new SentenceTokenizer(reader);
-            }
-        }));
-
-        // Register smartcn_sentence tokenizer
+        // Register smartcn_tokenizer tokenizer
         indicesAnalysisService.tokenizerFactories().put("smartcn_tokenizer", new PreBuiltTokenizerFactoryFactory(new TokenizerFactory() {
             @Override
             public String name() {