
Commit a01e26a

Correct spelling of AnalysisPlugin#requriesAnalysisSettings (#32025)
Because this is a static method on a public API, and one that we encourage plugin authors to use, the method with the typo is deprecated in 6.x rather than just renamed.
1 parent e31a877 commit a01e26a
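
Since requiresAnalysisSettings is part of the public AnalysisPlugin interface, the usual call site is a plugin's getTokenFilters() or getCharFilters() override. Below is a minimal sketch of that usage, not taken from this commit: MyAnalysisPlugin, MyLengthTokenFilterFactory, and the "my_length" filter name are hypothetical, and Lucene's LengthFilter is used only as a stand-in body.

// Hypothetical third-party plugin showing the renamed helper in use.
import java.util.Map;
import java.util.TreeMap;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.LengthFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;

import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;

public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {

    @Override
    public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
        Map<String, AnalysisProvider<TokenFilterFactory>> filters = new TreeMap<>();
        // requiresAnalysisSettings(...) marks the provider as needing per-filter
        // settings, so Elasticsearch rejects the filter unless the index configures it.
        filters.put("my_length", requiresAnalysisSettings(MyLengthTokenFilterFactory::new));
        return filters;
    }

    // Hypothetical factory that only makes sense with settings ("min"/"max").
    public static class MyLengthTokenFilterFactory extends AbstractTokenFilterFactory {
        private final int min;
        private final int max;

        public MyLengthTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
            super(indexSettings, name, settings);
            this.min = settings.getAsInt("min", 0);
            this.max = settings.getAsInt("max", Integer.MAX_VALUE);
        }

        @Override
        public TokenStream create(TokenStream tokenStream) {
            return new LengthFilter(tokenStream, min, max);
        }
    }
}

Because the misspelled method is kept as a deprecated forwarder to the new one (see the AnalysisPlugin.java diff below), plugins that still reference requriesAnalysisSettings continue to compile and run; only the spelling in new code needs to change.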

File tree

4 files changed: +35 -20 lines

docs/reference/migration/migrate_7_0/plugins.asciidoc (+5)

@@ -18,3 +18,8 @@ See {plugins}/repository-azure-repository-settings.html#repository-azure-reposit
 must now be specified in the client settings instead.
 
 See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings].
+
+==== Analysis Plugin changes
+
+* The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider<T> provider)` has been
+renamed to `requiresAnalysisSettings`

modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java (+13 -13)

@@ -135,7 +135,7 @@
 import java.util.Map;
 import java.util.TreeMap;
 
-import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
+import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
 
 public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
 
@@ -201,11 +201,11 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         filters.put("cjk_width", CJKWidthFilterFactory::new);
         filters.put("classic", ClassicFilterFactory::new);
         filters.put("czech_stem", CzechStemTokenFilterFactory::new);
-        filters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new));
+        filters.put("common_grams", requiresAnalysisSettings(CommonGramsTokenFilterFactory::new));
         filters.put("decimal_digit", DecimalDigitFilterFactory::new);
         filters.put("delimited_payload_filter", LegacyDelimitedPayloadTokenFilterFactory::new);
         filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new);
-        filters.put("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
+        filters.put("dictionary_decompounder", requiresAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
         filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
         filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
         filters.put("edgeNGram", EdgeNGramTokenFilterFactory::new);
@@ -216,11 +216,11 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         filters.put("german_normalization", GermanNormalizationFilterFactory::new);
         filters.put("german_stem", GermanStemTokenFilterFactory::new);
         filters.put("hindi_normalization", HindiNormalizationFilterFactory::new);
-        filters.put("hyphenation_decompounder", requriesAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
+        filters.put("hyphenation_decompounder", requiresAnalysisSettings(HyphenationCompoundWordTokenFilterFactory::new));
         filters.put("indic_normalization", IndicNormalizationFilterFactory::new);
-        filters.put("keep", requriesAnalysisSettings(KeepWordFilterFactory::new));
-        filters.put("keep_types", requriesAnalysisSettings(KeepTypesFilterFactory::new));
-        filters.put("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
+        filters.put("keep", requiresAnalysisSettings(KeepWordFilterFactory::new));
+        filters.put("keep_types", requiresAnalysisSettings(KeepTypesFilterFactory::new));
+        filters.put("keyword_marker", requiresAnalysisSettings(KeywordMarkerTokenFilterFactory::new));
         filters.put("kstem", KStemTokenFilterFactory::new);
         filters.put("length", LengthTokenFilterFactory::new);
         filters.put("limit", LimitTokenCountFilterFactory::new);
@@ -229,8 +229,8 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         filters.put("multiplexer", MultiplexerTokenFilterFactory::new);
         filters.put("ngram", NGramTokenFilterFactory::new);
         filters.put("nGram", NGramTokenFilterFactory::new);
-        filters.put("pattern_capture", requriesAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
-        filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new));
+        filters.put("pattern_capture", requiresAnalysisSettings(PatternCaptureGroupTokenFilterFactory::new));
+        filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new));
         filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
         filters.put("porter_stem", PorterStemTokenFilterFactory::new);
         filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new);
@@ -241,10 +241,10 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         filters.put("serbian_normalization", SerbianNormalizationFilterFactory::new);
         filters.put("snowball", SnowballTokenFilterFactory::new);
         filters.put("sorani_normalization", SoraniNormalizationFilterFactory::new);
-        filters.put("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
+        filters.put("stemmer_override", requiresAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
         filters.put("stemmer", StemmerTokenFilterFactory::new);
         filters.put("trim", TrimTokenFilterFactory::new);
-        filters.put("truncate", requriesAnalysisSettings(TruncateTokenFilterFactory::new));
+        filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new));
         filters.put("unique", UniqueTokenFilterFactory::new);
         filters.put("uppercase", UpperCaseTokenFilterFactory::new);
         filters.put("word_delimiter_graph", WordDelimiterGraphTokenFilterFactory::new);
@@ -256,8 +256,8 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
     public Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
         Map<String, AnalysisProvider<CharFilterFactory>> filters = new TreeMap<>();
         filters.put("html_strip", HtmlStripCharFilterFactory::new);
-        filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceCharFilterFactory::new));
-        filters.put("mapping", requriesAnalysisSettings(MappingCharFilterFactory::new));
+        filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceCharFilterFactory::new));
+        filters.put("mapping", requiresAnalysisSettings(MappingCharFilterFactory::new));
         return filters;
     }

server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java (+2 -2)

@@ -54,7 +54,7 @@
 import java.util.Map;
 
 import static java.util.Collections.unmodifiableMap;
-import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
+import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
 
 /**
  * Sets up {@link AnalysisRegistry}.
@@ -118,7 +118,7 @@ private NamedRegistry<AnalysisProvider<TokenFilterFactory>> setupTokenFilters(Li
         tokenFilters.register("stop", StopTokenFilterFactory::new);
         tokenFilters.register("standard", StandardTokenFilterFactory::new);
         tokenFilters.register("shingle", ShingleTokenFilterFactory::new);
-        tokenFilters.register("hunspell", requriesAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory
+        tokenFilters.register("hunspell", requiresAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory
             (indexSettings, name, settings, hunspellService)));
 
         tokenFilters.extractAndRegister(plugins, AnalysisPlugin::getTokenFilters);

server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java (+15 -5)

@@ -57,36 +57,36 @@
  * }</pre>
  *
  * Elasticsearch doesn't have any automatic mechanism to share these components between indexes. If any component is heavy enough to warrant
- * such sharing then it is the Pugin's responsibility to do it in their {@link AnalysisProvider} implementation. We recommend against doing
+ * such sharing then it is the Plugin's responsibility to do it in their {@link AnalysisProvider} implementation. We recommend against doing
  * this unless absolutely necessary because it can be difficult to get the caching right given things like behavior changes across versions.
  */
 public interface AnalysisPlugin {
     /**
-     * Override to add additional {@link CharFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * Override to add additional {@link CharFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
      * how to on get the configuration from the index.
      */
     default Map<String, AnalysisProvider<CharFilterFactory>> getCharFilters() {
         return emptyMap();
     }
 
     /**
-     * Override to add additional {@link TokenFilter}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * Override to add additional {@link TokenFilter}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
      * how to on get the configuration from the index.
      */
     default Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         return emptyMap();
     }
 
     /**
-     * Override to add additional {@link Tokenizer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * Override to add additional {@link Tokenizer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
      * how to on get the configuration from the index.
      */
     default Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
         return emptyMap();
     }
 
     /**
-     * Override to add additional {@link Analyzer}s. See {@link #requriesAnalysisSettings(AnalysisProvider)}
+     * Override to add additional {@link Analyzer}s. See {@link #requiresAnalysisSettings(AnalysisProvider)}
      * how to on get the configuration from the index.
      */
     default Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
@@ -130,8 +130,18 @@ default Map<String, org.apache.lucene.analysis.hunspell.Dictionary> getHunspellD
 
     /**
      * Mark an {@link AnalysisProvider} as requiring the index's settings.
+     *
+     * @deprecated use {@link #requiresAnalysisSettings(AnalysisProvider)}
      */
+    @Deprecated
     static <T> AnalysisProvider<T> requriesAnalysisSettings(AnalysisProvider<T> provider) {
+        return requiresAnalysisSettings(provider);
+    }
+
+    /**
+     * Mark an {@link AnalysisProvider} as requiring the index's settings.
+     */
+    static <T> AnalysisProvider<T> requiresAnalysisSettings(AnalysisProvider<T> provider) {
         return new AnalysisProvider<T>() {
             @Override
             public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
