From 370f0b885e347c7659f60e78865ca40350b46ced Mon Sep 17 00:00:00 2001 From: Jun Ohtani Date: Thu, 22 Sep 2016 20:54:30 +0900 Subject: [PATCH] Removing request parameters in _analyze API Remove request params in _analyze API, except the index param Change rest-api tests to use JSON Change docs to use JSON Closes #20246 --- .../admin/indices/RestAnalyzeAction.java | 31 +---- .../admin/indices/RestAnalyzeActionTests.java | 12 ++++ docs/plugins/analysis-icu.asciidoc | 15 ++++-- docs/plugins/analysis-kuromoji.asciidoc | 54 +++++++++++++++---- docs/plugins/analysis-phonetic.asciidoc | 6 ++- docs/reference/indices/analyze.asciidoc | 15 ------ .../mapping/params/analyzer.asciidoc | 6 ++- .../test/analysis_icu/10_basic.yaml | 26 +++++---- .../test/analysis_kuromoji/10_basic.yaml | 31 ++++++----- .../test/analysis_phonetic/10_metaphone.yaml | 5 +- .../20_double_metaphone.yaml | 5 +- .../analysis_phonetic/30_beider_morse.yaml | 5 +- .../analysis_phonetic/50_daitch_mokotoff.yaml | 5 +- .../test/analysis_smartcn/10_basic.yaml | 10 ++-- .../test/analysis_stempel/10_basic.yaml | 12 +++-- .../test/indices.analyze/10_analyze.yaml | 23 ++++---- 16 files changed, 144 insertions(+), 117 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 247df1a380e..02ac7201fb3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -67,42 +67,15 @@ public class RestAnalyzeAction extends BaseRestHandler { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - String[] texts = request.paramAsStringArrayOrEmptyIfAll("text"); AnalyzeRequest analyzeRequest = new AnalyzeRequest(request.param("index")); - analyzeRequest.text(texts); - analyzeRequest.analyzer(request.param("analyzer")); - analyzeRequest.field(request.param("field")); - final String tokenizer = request.param("tokenizer"); - if (tokenizer != null) { - analyzeRequest.tokenizer(tokenizer); - } - for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) { - analyzeRequest.addTokenFilter(filter); - } - for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) { - analyzeRequest.addTokenFilter(charFilter); - } - analyzeRequest.explain(request.paramAsBoolean("explain", false)); - analyzeRequest.attributes(request.paramAsStringArray("attributes", analyzeRequest.attributes())); - if (RestActions.hasBodyContent(request)) { - XContentType type = RestActions.guessBodyContentType(request); - if (type == null) { - if (texts == null || texts.length == 0) { - texts = new String[]{ RestActions.getRestContent(request).utf8ToString() }; - analyzeRequest.text(texts); - } - } else { - // NOTE: if rest request with xcontent body has request parameters, the parameters does not override xcontent values - buildFromContent(RestActions.getRestContent(request), analyzeRequest, parseFieldMatcher); - } - } + buildFromContent(RestActions.getRestContent(request), analyzeRequest, parseFieldMatcher); return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel)); } - public static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) { + static void buildFromContent(BytesReference 
content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) { try (XContentParser parser = XContentHelper.createParser(content)) { if (parser.nextToken() != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException("Malformed content, must start with an object"); diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 9b7d4073d0d..980a76d12cb 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -19,16 +19,25 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import java.util.HashMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; +import static org.mockito.Mockito.doCallRealMethod; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; public class RestAnalyzeActionTests extends ESTestCase { @@ -118,7 +127,7 @@ public class RestAnalyzeActionTests extends ESTestCase { assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); } - public void testDeprecatedParamException() throws Exception { + public void testDeprecatedParamIn2xException() throws Exception { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent( XContentFactory.jsonBuilder() @@ -165,5 +174,4 @@ public class RestAnalyzeActionTests extends ESTestCase { , new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); } - } diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index 1677634bb56..5ea55d7437d 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -164,7 +164,11 @@ PUT icu_sample } } -POST icu_sample/_analyze?analyzer=my_analyzer&text=Elasticsearch. Wow! +POST icu_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "Elasticsearch. Wow!" 
+} -------------------------------------------------- // CONSOLE @@ -480,18 +484,21 @@ PUT icu_sample } } -GET icu_sample/_analyze?analyzer=latin +GET icu_sample/_analyze { + "analyzer": "latin", "text": "你好" <2> } -GET icu_sample/_analyze?analyzer=latin +GET icu_sample/_analyze { + "analyzer": "latin", "text": "здравствуйте" <3> } -GET icu_sample/_analyze?analyzer=latin +GET icu_sample/_analyze { + "analyzer": "latin", "text": "こんにちは" <4> } diff --git a/docs/plugins/analysis-kuromoji.asciidoc b/docs/plugins/analysis-kuromoji.asciidoc index 6b3dc0a72f1..44eab2f2329 100644 --- a/docs/plugins/analysis-kuromoji.asciidoc +++ b/docs/plugins/analysis-kuromoji.asciidoc @@ -175,7 +175,11 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=東京スカイツリー +POST kuromoji_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "東京スカイツリー" +} -------------------------------------------------- // CONSOLE @@ -228,7 +232,11 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=飲み +POST kuromoji_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "飲み" +} -------------------------------------------------- // CONSOLE @@ -290,7 +298,11 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=寿司がおいしいね +POST kuromoji_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "寿司がおいしいね" +} -------------------------------------------------- // CONSOLE @@ -363,9 +375,17 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=katakana_analyzer&text=寿司 <1> +POST kuromoji_sample/_analyze +{ + "analyzer": "katakana_analyzer", + "text": "寿司" <1> +} -POST kuromoji_sample/_analyze?analyzer=romaji_analyzer&text=寿司 <2> +POST kuromoji_sample/_analyze +{ + "analyzer": "romaji_analyzer", + "text": "寿司" <2> +} -------------------------------------------------- // CONSOLE @@ -413,9 +433,17 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=コピー <1> +POST kuromoji_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "コピー" <1> +} -POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=サーバー <2> +POST kuromoji_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "サーバー" <2> +} -------------------------------------------------- // CONSOLE @@ -461,7 +489,11 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=analyzer_with_ja_stop&text=ストップは消える +POST kuromoji_sample/_analyze +{ + "analyzer": "analyzer_with_ja_stop", + "text": "ストップは消える" +} -------------------------------------------------- // CONSOLE @@ -507,7 +539,11 @@ PUT kuromoji_sample } } -POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=一〇〇〇 +POST kuromoji_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "一〇〇〇" +} -------------------------------------------------- // CONSOLE diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc index 0544900a8ca..fffbfcbd103 100644 --- a/docs/plugins/analysis-phonetic.asciidoc +++ b/docs/plugins/analysis-phonetic.asciidoc @@ -82,7 +82,11 @@ PUT phonetic_sample } } -POST phonetic_sample/_analyze?analyzer=my_analyzer&text=Joe Bloggs <1> +POST phonetic_sample/_analyze +{ + "analyzer": "my_analyzer", + "text": "Joe Bloggs" <1> +} -------------------------------------------------- // CONSOLE diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc index dbb2c8f101a..0d9d60d4845 100644 --- a/docs/reference/indices/analyze.asciidoc +++ b/docs/reference/indices/analyze.asciidoc @@ -100,21 +100,6 @@ curl 
-XGET 'localhost:9200/test/_analyze' -d ' Will cause the analysis to happen based on the analyzer configured in the mapping for `obj1.field1` (and if not, the default index analyzer). -All parameters can also supplied as request parameters. For example: - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&text=this+is+a+test' --------------------------------------------------- - -For backwards compatibility, we also accept the text parameter as the body of the request, -provided it doesn't start with `{` : - -[source,js] --------------------------------------------------- -curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&char_filter=html_strip' -d 'this is a test' --------------------------------------------------- - === Explain Analyze If you want to get more advanced details, set `explain` to `true` (defaults to `false`). It will output all token attributes for each token. diff --git a/docs/reference/mapping/params/analyzer.asciidoc b/docs/reference/mapping/params/analyzer.asciidoc index c075b662805..0b60451e02a 100644 --- a/docs/reference/mapping/params/analyzer.asciidoc +++ b/docs/reference/mapping/params/analyzer.asciidoc @@ -60,13 +60,15 @@ PUT /my_index } } -GET my_index/_analyze?field=text <3> +GET my_index/_analyze <3> { + "field": "text", "text": "The quick Brown Foxes." } -GET my_index/_analyze?field=text.english <4> +GET my_index/_analyze <4> { + "field": "text.english", "text": "The quick Brown Foxes." } -------------------------------------------------- diff --git a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yaml b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yaml index 64fbbcadf7d..180f6c6f5b6 100644 --- a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yaml +++ b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/10_basic.yaml @@ -3,8 +3,9 @@ "Tokenizer": - do: indices.analyze: - text: Foo Bar - tokenizer: icu_tokenizer + body: + text: Foo Bar + tokenizer: icu_tokenizer - length: { tokens: 2 } - match: { tokens.0.token: Foo } - match: { tokens.1.token: Bar } @@ -12,26 +13,29 @@ "Normalization filter": - do: indices.analyze: - filter: icu_normalizer - text: Foo Bar Ruß - tokenizer: keyword + body: + filter: [icu_normalizer] + text: Foo Bar Ruß + tokenizer: keyword - length: { tokens: 1 } - match: { tokens.0.token: foo bar russ } --- "Normalization charfilter": - do: indices.analyze: - char_filter: icu_normalizer - text: Foo Bar Ruß - tokenizer: keyword + body: + char_filter: [icu_normalizer] + text: Foo Bar Ruß + tokenizer: keyword - length: { tokens: 1 } - match: { tokens.0.token: foo bar russ } --- "Folding filter": - do: indices.analyze: - filter: icu_folding - text: Foo Bar résumé - tokenizer: keyword + body: + filter: [icu_folding] + text: Foo Bar résumé + tokenizer: keyword - length: { tokens: 1 } - match: { tokens.0.token: foo bar resume } diff --git a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/10_basic.yaml b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/10_basic.yaml index 42df558567d..1cca2b728e0 100644 --- a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/10_basic.yaml +++ b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/10_basic.yaml @@ -4,8 +4,9 @@ "Analyzer": - do: indices.analyze: - 
text: JR新宿駅の近くにビールを飲みに行こうか - analyzer: kuromoji + body: + text: JR新宿駅の近くにビールを飲みに行こうか + analyzer: kuromoji - length: { tokens: 7 } - match: { tokens.0.token: jr } - match: { tokens.1.token: 新宿 } @@ -18,8 +19,9 @@ "Tokenizer": - do: indices.analyze: - text: 関西国際空港 - tokenizer: kuromoji_tokenizer + body: + text: 関西国際空港 + tokenizer: kuromoji_tokenizer - length: { tokens: 4 } - match: { tokens.0.token: 関西 } - match: { tokens.1.token: 関西国際空港 } @@ -29,26 +31,29 @@ "Baseform filter": - do: indices.analyze: - text: 飲み - tokenizer: kuromoji_tokenizer - filter: kuromoji_baseform + body: + text: 飲み + tokenizer: kuromoji_tokenizer + filter: [kuromoji_baseform] - length: { tokens: 1 } - match: { tokens.0.token: 飲む } --- "Reading filter": - do: indices.analyze: - text: 寿司 - tokenizer: kuromoji_tokenizer - filter: kuromoji_readingform + body: + text: 寿司 + tokenizer: kuromoji_tokenizer + filter: [kuromoji_readingform] - length: { tokens: 1 } - match: { tokens.0.token: スシ } --- "Stemming filter": - do: indices.analyze: - text: サーバー - tokenizer: kuromoji_tokenizer - filter: kuromoji_stemmer + body: + text: サーバー + tokenizer: kuromoji_tokenizer + filter: [kuromoji_stemmer] - length: { tokens: 1 } - match: { tokens.0.token: サーバ } diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml index 02d4b315b6e..1f326fe3776 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml @@ -22,8 +22,9 @@ - do: indices.analyze: index: phonetic_sample - analyzer: my_analyzer - text: Joe Bloggs + body: + analyzer: my_analyzer + text: Joe Bloggs - length: { tokens: 4 } - match: { tokens.0.token: J } diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml index 675847e557e..5af9f48aa80 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml @@ -22,8 +22,9 @@ - do: indices.analyze: index: phonetic_sample - analyzer: my_analyzer - text: supercalifragilisticexpialidocious + body: + analyzer: my_analyzer + text: supercalifragilisticexpialidocious - length: { tokens: 1 } - match: { tokens.0.token: SPRKLF } diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml index 015610af172..259b0adea74 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml @@ -24,8 +24,9 @@ - do: indices.analyze: index: phonetic_sample - analyzer: my_analyzer - text: Szwarc + body: + analyzer: my_analyzer + text: Szwarc - length: { tokens: 1 } - match: { tokens.0.token: Svarts } diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml 
b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml index 5125ae3d684..c67b6892bc9 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml @@ -21,8 +21,9 @@ - do: indices.analyze: index: phonetic_sample - analyzer: my_analyzer - text: Moskowitz + body: + analyzer: my_analyzer + text: Moskowitz - length: { tokens: 1 } - match: { tokens.0.token: "645740" } diff --git a/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/10_basic.yaml b/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/10_basic.yaml index 2549f774f81..0f1b2805c93 100644 --- a/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/10_basic.yaml +++ b/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/10_basic.yaml @@ -3,8 +3,9 @@ "Tokenizer": - do: indices.analyze: - text: 我购买了道具和服装。 - tokenizer: smartcn_tokenizer + body: + text: 我购买了道具和服装。 + tokenizer: smartcn_tokenizer - length: { tokens: 7 } - match: { tokens.0.token: 我 } - match: { tokens.1.token: 购买 } @@ -17,8 +18,9 @@ "Analyzer": - do: indices.analyze: - text: 我购买了道具和服装。 - analyzer: smartcn + body: + text: 我购买了道具和服装。 + analyzer: smartcn - length: { tokens: 6 } - match: { tokens.0.token: 我 } - match: { tokens.1.token: 购买 } diff --git a/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/10_basic.yaml b/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/10_basic.yaml index f87f00b7922..1941126c64f 100644 --- a/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/10_basic.yaml +++ b/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/10_basic.yaml @@ -3,16 +3,18 @@ "Stemmer": - do: indices.analyze: - text: studenci - tokenizer: keyword - filter: polish_stem + body: + text: studenci + tokenizer: keyword + filter: [polish_stem] - length: { tokens: 1 } - match: { tokens.0.token: student } --- "Analyzer": - do: indices.analyze: - text: studenta był - analyzer: polish + body: + text: studenta był + analyzer: polish - length: { tokens: 1 } - match: { tokens.0.token: student } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml index 35d4a2b5222..268cd781289 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml @@ -8,7 +8,8 @@ setup: "Basic test": - do: indices.analyze: - text: Foo Bar + body: + text: Foo Bar - length: { tokens: 2 } - match: { tokens.0.token: foo } - match: { tokens.1.token: bar } @@ -17,9 +18,10 @@ setup: "Tokenizer and filter": - do: indices.analyze: - filter: lowercase - text: Foo Bar - tokenizer: keyword + body: + filter: [lowercase] + text: Foo Bar + tokenizer: keyword - length: { tokens: 1 } - match: { tokens.0.token: foo bar } @@ -38,9 +40,10 @@ setup: - do: indices.analyze: - field: text index: test - text: Foo Bar! + body: + field: text + text: Foo Bar! - length: { tokens: 2 } - match: { tokens.0.token: Foo } - match: { tokens.1.token: Bar! 
} @@ -52,14 +55,6 @@ setup: - length: {tokens: 1 } - match: { tokens.0.token: foo bar } --- -"Body params override query string": - - do: - indices.analyze: - text: Foo Bar - body: { "text": "Bar Foo", "filter": ["lowercase"], "tokenizer": keyword } - - length: {tokens: 1 } - - match: { tokens.0.token: bar foo } ---- "Array text": - do: indices.analyze: