Correct spelling of AnalysisPlugin#requriesAnalysisSettings (#32025)

Because this is a static method on a public API, and one that we encourage
plugin authors to use, the method with the typo is deprecated in 6.x
rather than just renamed.
Alan Woodward, 2018-07-13 13:13:21 +01:00, committed by GitHub
parent e31a877a64
commit a01e26a39b
4 changed files with 35 additions and 20 deletions
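
For plugin authors the practical change is only the helper's name. The sketch below shows how a plugin would register a token filter whose factory needs its own index settings, using the corrected helper. MyAnalysisPlugin and MyTokenFilterFactory are hypothetical names used purely for illustration, and the import paths assume the 6.x package layout; this is not code from the commit itself.

import java.util.Map;
import java.util.TreeMap;

import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;

import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;

public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
    @Override
    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        Map<String, AnalysisProvider<TokenFilterFactory>> filters = new TreeMap<>();
        // Wrapping the provider marks it as requiring the index's analysis settings
        // for the named component being built.
        // MyTokenFilterFactory is a hypothetical factory class, not part of this commit.
        filters.put("my_filter", requiresAnalysisSettings(MyTokenFilterFactory::new));
        return filters;
    }
}

Plugins that still call the misspelled requriesAnalysisSettings continue to compile against 6.x, because the old method now simply delegates to the new one, but they get a deprecation warning until they switch to the corrected name.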

View File

@@ -18,3 +18,8 @@ See {plugins}/repository-azure-repository-settings.html#repository-azure-reposit
 must now be specified in the client settings instead.
 See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings].
+==== Analysis Plugin changes
+* The misspelled helper method `requriesAnalysisSettings(AnalysisProvider<T> provider)` has been
+renamed to `requiresAnalysisSettings`

View File

@@ -135,7 +135,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
-import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
+import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
 public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
@@ -201,11 +201,11 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 filters.put("cjk_width", CJKWidthFilterFactory::new);
 filters.put("classic", ClassicFilterFactory::new);
 filters.put("czech_stem", CzechStemTokenFilterFactory::new);
-filters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new));
+filters.put("common_grams", requiresAnalysisSettings(CommonGramsTokenFilterFactory::new));
 filters.put("decimal_digit", DecimalDigitFilterFactory::new);
 filters.put("delimited_payload_filter", LegacyDelimitedPayloadTokenFilterFactory::new);
 filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new);
-filters.put("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
+filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
 filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
 filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
 filters.put("edgeNGram", EdgeNGramTokenFilterFactory::new);
@@ -216,11 +216,11 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 filters.put("german_normalization", GermanNormalizationFilterFactory::new);
 filters.put("german_stem", GermanStemTokenFilterFactory::new);
 filters.put("hindi_normalization", HindiNormalizationFilterFactory::new);
-filters.put("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
+filters.put("hyphenation_decompounder", requiresAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
 filters.put("indic_normalization", IndicNormalizationFilterFactory::new);
-filters.put("keep", requriesAnalysisSettings(KeepWordFilterFactory::new));
-filters.put("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new));
-filters.put("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
+filters.put("keep", requiresAnalysisSettings(KeepWordFilterFactory::new));
+filters.put("keep_types", requiresAnalysisSettings(KeepTypesFilterFactory::new));
+filters.put("keyword_marker", requiresAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
 filters.put("kstem", KStemTokenFilterFactory::new);
 filters.put("length", LengthTokenFilterFactory::new);
 filters.put("limit", LimitTokenCountFilterFactory::new);
@@ -229,8 +229,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
 filters.put("ngram", NGramTokenFilterFactory::new);
 filters.put("nGram", NGramTokenFilterFactory::new);
-filters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
-filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new));
+filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
+filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new));
 filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
 filters.put("porter_stem", PorterStemTokenFilterFactory::new);
 filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new);
@@ -241,10 +241,10 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 filters.put("serbian_normalization", SerbianNormalizationFilterFactory::new);
 filters.put("snowball", SnowballTokenFilterFactory::new);
 filters.put("sorani_normalization", SoraniNormalizationFilterFactory::new);
-filters.put("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
+filters.put("stemmer_override", requiresAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
 filters.put("stemmer", StemmerTokenFilterFactory::new);
 filters.put("trim", TrimTokenFilterFactory::new);
-filters.put("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new));
+filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new));
 filters.put("unique", UniqueTokenFilterFactory::new);
 filters.put("uppercase", UpperCaseTokenFilterFactory::new);
 filters.put("word_delimiter_graph", WordDelimiterGraphTokenFilterFactory::new);
@@ -256,8 +256,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
 Map<String, AnalysisProvider<CharFilterFactory>> filters = new TreeMap<>();
 filters.put("html_strip", HtmlStripCharFilterFactory::new);
-filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new));
-filters.put("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new));
+filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceCharFilterFactory::new));
+filters.put("mapping", requiresAnalysisSettings(MappingCharFilterFactory::new));
 return filters;
 }

View File

@@ -54,7 +54,7 @@ import java.util.Locale;
 import java.util.Map;
 import static java.util.Collections.unmodifiableMap;
-import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
+import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
 /**
 * Sets up {@link AnalysisRegistry}.
@@ -118,7 +118,7 @@ public final class AnalysisModule {
 tokenFilters.register("stop", StopTokenFilterFactory::new);
 tokenFilters.register("standard", StandardTokenFilterFactory::new);
 tokenFilters.register("shingle", ShingleTokenFilterFactory::new);
-tokenFilters.register("hunspell", requriesAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory
+tokenFilters.register("hunspell", requiresAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory
 (indexSettings, name, settings, hunspellService)));
 tokenFilters.extractAndRegister(plugins, AnalysisPlugin::getTokenFilters);

View File

@@ -57,12 +57,12 @@ import static java.util.Collections.emptyMap;
 * }</pre>
 *
 * Elasticsearch doesn't have any automatic mechanism to share these components between indexes. If any component is heavy enough to warrant
-* such sharing then it is the Pugin's responsibility to do it in their {@link AnalysisProvider} implementation. We recommend against doing
+* such sharing then it is the Plugin's responsibility to do it in their {@link AnalysisProvider} implementation. We recommend against doing
 * this unless absolutely necessary because it can be difficult to get the caching right given things like behavior changes across versions.
 */
 public interface AnalysisPlugin {
 /**
-* Override to add additional {@link CharFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+* Override to add additional {@link CharFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
 * how to on get the configuration from the index.
 */
 default Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
@@ -70,7 +70,7 @@ public interface AnalysisPlugin {
 }
 /**
-* Override to add additional {@link TokenFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+* Override to add additional {@link TokenFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
 * how to on get the configuration from the index.
 */
 default Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
@@ -78,7 +78,7 @@ public interface AnalysisPlugin {
 }
 /**
-* Override to add additional {@link Tokenizer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+* Override to add additional {@link Tokenizer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
 * how to on get the configuration from the index.
 */
 default Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
@@ -86,7 +86,7 @@ public interface AnalysisPlugin {
 }
 /**
-* Override to add additional {@link Analyzer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+* Override to add additional {@link Analyzer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
 * how to on get the configuration from the index.
 */
 default Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
@@ -130,8 +130,18 @@ public interface AnalysisPlugin {
 /**
 * Mark an {@link AnalysisProvider} as requiring the index's settings.
+*
+* @deprecated use {@link #requiresAnalysisSettings(AnalysisProvider)}
 */
+@Deprecated
 static <T> AnalysisProvider<T> requriesAnalysisSettings(AnalysisProvider<T> provider) {
+return requiresAnalysisSettings(provider);
+}
+/**
+* Mark an {@link AnalysisProvider} as requiring the index's settings.
+*/
+static <T> AnalysisProvider<T> requiresAnalysisSettings(AnalysisProvider<T> provider) {
 return new AnalysisProvider<T>() {
 @Override
 public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
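
The last hunk is cut off above. For completeness, here is a sketch of how the new wrapper presumably continues, inferred from the surrounding code rather than taken from lines shown in this diff: it delegates creation to the wrapped provider and reports that it requires the per-component analysis settings.

static <T> AnalysisProvider<T> requiresAnalysisSettings(AnalysisProvider<T> provider) {
    return new AnalysisProvider<T>() {
        @Override
        public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
            // Delegate construction to the wrapped provider. (Sketch only; the rest of
            // the hunk is not shown above, so this is an assumption about the elided lines.)
            return provider.get(indexSettings, environment, name, settings);
        }

        @Override
        public boolean requiresAnalysisSettings() {
            return true;
        }
    };
}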