Fix `_analyze` API to correctly use normalizers when specified (#48866)

Currently the `_analyze` endpoint doesn't correctly use normalizers specified
in the request. This change fixes that by returning the resolved normalizer from
TransportAnalyzeAction#getAnalyzer and updates the test so that it can catch this
in the future.

Closes #48650
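
For reference, the behavior this change restores can be exercised the same way the
updated test below does. This is a minimal sketch only, assuming the registry,
mockIndexService() and maxTokenCount fixtures from TransportAnalyzeActionTests and a
normalizer named "my_normalizer" defined on the index:

    // Sketch only: relies on the test fixtures (registry, mockIndexService(), maxTokenCount)
    // from TransportAnalyzeActionTests and on an index-level normalizer named "my_normalizer".
    AnalyzeAction.Request request = new AnalyzeAction.Request("index");
    request.normalizer("my_normalizer");
    request.text("Wi-fi");

    AnalyzeAction.Response response =
        TransportAnalyzeAction.analyze(request, registry, mockIndexService(), maxTokenCount);

    // A normalizer emits exactly one token: the whole input, lowercased.
    List<AnalyzeAction.AnalyzeToken> tokens = response.getTokens();
    assertEquals(1, tokens.size());
    assertEquals("wi-fi", tokens.get(0).getTerm());
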
Authored by bellengao on 2019-11-15 02:50:46 +08:00, committed by Christoph Büscher
parent e8f49cdee0
commit 6ce04429c6
2 changed files with 4 additions and 2 deletions


@@ -171,6 +171,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeAc
             if (analyzer == null) {
                 throw new IllegalArgumentException("failed to find normalizer under [" + request.normalizer() + "]");
             }
+            return analyzer;
         }
         if (request.field() != null) {
             if (indexService == null) {


@@ -443,13 +443,14 @@ public class TransportAnalyzeActionTests extends ESTestCase {
     public void testNormalizerWithIndex() throws IOException {
         AnalyzeAction.Request request = new AnalyzeAction.Request("index");
         request.normalizer("my_normalizer");
-        request.text("ABc");
+        // this should be lowercased and only emit a single token
+        request.text("Wi-fi");
         AnalyzeAction.Response analyze
             = TransportAnalyzeAction.analyze(request, registry, mockIndexService(), maxTokenCount);
         List<AnalyzeAction.AnalyzeToken> tokens = analyze.getTokens();
         assertEquals(1, tokens.size());
-        assertEquals("abc", tokens.get(0).getTerm());
+        assertEquals("wi-fi", tokens.get(0).getTerm());
     }
     /**
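
The switch from "ABc" to "Wi-fi" is what lets the test catch the bug: with the broken
fall-through, the text was presumably still lowercased to the single token "abc" by
whatever analyzer the method fell back to, so the old assertions passed either way.
An ordinary analyzer, however, splits "Wi-fi" into two tokens, while a normalizer always
emits exactly one token for the whole input, so the new assertions fail unless the
requested normalizer is really used. A hypothetical contrast (plain Java, not
Elasticsearch code):

    // Hypothetical illustration of why "Wi-fi" distinguishes a normalizer from an
    // ordinary analyzer while "ABc" does not.
    import java.util.Arrays;
    import java.util.List;
    import java.util.Locale;

    class NormalizerVsAnalyzer {
        public static void main(String[] args) {
            String text = "Wi-fi";

            // a normalizer keeps the input as one token and only normalizes it (e.g. lowercases)
            List<String> normalized = List.of(text.toLowerCase(Locale.ROOT));               // [wi-fi]

            // an analyzer that tokenizes on the hyphen and then lowercases produces two tokens
            List<String> analyzed = Arrays.asList(text.toLowerCase(Locale.ROOT).split("-")); // [wi, fi]

            System.out.println(normalized); // the updated test expects exactly this: one token, "wi-fi"
            System.out.println(analyzed);   // what a fall-through to a regular analyzer would produce
        }
    }
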