add russian letter tokenizer
commit d0f5bc3403
parent 49b4659322
AbstractTokenizerFactory.java
@@ -25,7 +25,7 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.settings.IndexSettings;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public abstract class AbstractTokenizerFactory extends AbstractIndexComponent implements TokenizerFactory {
 
AnalysisModule.java
@@ -305,6 +305,8 @@ public class AnalysisModule extends AbstractModule {
             tokenizersBindings.processTokenizer("letter", LetterTokenizerFactory.class);
             tokenizersBindings.processTokenizer("lowercase", LowerCaseTokenizerFactory.class);
             tokenizersBindings.processTokenizer("whitespace", WhitespaceTokenizerFactory.class);
+            tokenizersBindings.processTokenizer("russian_letter", RussianLetterTokenizerFactory.class);
+            tokenizersBindings.processTokenizer("russianLetter", RussianLetterTokenizerFactory.class);
         }
 
         @Override public void processAnalyzers(AnalyzersBindings analyzersBindings) {
@@ -347,7 +349,7 @@ public class AnalysisModule extends AbstractModule {
             analyzersBindings.processAnalyzer("arabic", ArabicAnalyzerProvider.class);
             analyzersBindings.processAnalyzer("brazilian", BrazilianAnalyzerProvider.class);
             analyzersBindings.processAnalyzer("chinese", ChineseAnalyzerProvider.class);
-            analyzersBindings.processAnalyzer("cjk", ChineseAnalyzerProvider.class);
+            analyzersBindings.processAnalyzer("cjk", CjkAnalyzerProvider.class);
             analyzersBindings.processAnalyzer("czech", CzechAnalyzerProvider.class);
             analyzersBindings.processAnalyzer("dutch", DutchAnalyzerProvider.class);
             analyzersBindings.processAnalyzer("french", FrenchAnalyzerProvider.class);
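Both "russian_letter" and "russianLetter" now map to the same factory, so an index should be able to reference the tokenizer under either name in its analysis settings. A minimal sketch of such a configuration using the settings builder of this era follows; the analyzer name my_russian and the exact index.analysis.* keys are illustrative assumptions, not part of this commit.

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class RussianTokenizerSettingsSketch {

    public static void main(String[] args) {
        // Hypothetical index settings wiring a custom analyzer to the newly
        // registered tokenizer name; the key layout follows the usual
        // index.analysis.* convention and is an assumption here.
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("index.analysis.analyzer.my_russian.type", "custom")
                .put("index.analysis.analyzer.my_russian.tokenizer", "russian_letter")
                .build();

        System.out.println(settings.get("index.analysis.analyzer.my_russian.tokenizer"));
    }
}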
RussianLetterTokenizerFactory.java (new file)
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.ru.RussianLetterTokenizer;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.inject.assistedinject.Assisted;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.settings.IndexSettings;
+
+import java.io.Reader;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class RussianLetterTokenizerFactory extends AbstractTokenizerFactory {
+
+    @Inject public RussianLetterTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
+        super(index, indexSettings, name);
+    }
+
+    @Override public Tokenizer create(Reader reader) {
+        return new RussianLetterTokenizer(reader);
+    }
+}
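A rough way to exercise the new factory outside of Guice is to construct it directly and feed the resulting tokenizer a Reader. The sketch below assumes Index has a single-String constructor, that empty Settings are acceptable, and a Lucene 3.x-era TermAttribute API; the sample text and class name are illustrative only.

import java.io.StringReader;

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.RussianLetterTokenizerFactory;

public class RussianLetterTokenizerFactorySketch {

    public static void main(String[] args) throws Exception {
        // Construct the factory by hand (normally Guice supplies these
        // arguments via @Inject/@Assisted); empty settings suffice because
        // the factory ignores its per-tokenizer settings.
        Settings empty = ImmutableSettings.settingsBuilder().build();
        RussianLetterTokenizerFactory factory =
                new RussianLetterTokenizerFactory(new Index("test"), empty, "russian_letter", empty);

        // RussianLetterTokenizer behaves like LetterTokenizer but also
        // accepts the basic Latin digits 0-9.
        Tokenizer tokenizer = factory.create(new StringReader("Привет world 123"));
        TermAttribute term = tokenizer.addAttribute(TermAttribute.class);
        while (tokenizer.incrementToken()) {
            System.out.println(term.term());
        }
        tokenizer.close();
    }
}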