Remove `token_filter` in _analyze API

Remove the param and change docs

Closes #20283
This commit is contained in:
Jun Ohtani 2016-09-02 00:04:13 +09:00
parent 0e8a43e826
commit 3d9f8ed764
4 changed files with 23 additions and 8 deletions

View File

@ -50,7 +50,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
public static final ParseField TEXT = new ParseField("text");
public static final ParseField FIELD = new ParseField("field");
public static final ParseField TOKENIZER = new ParseField("tokenizer");
public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter");
public static final ParseField TOKEN_FILTERS = new ParseField("filter");
public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
public static final ParseField EXPLAIN = new ParseField("explain");
public static final ParseField ATTRIBUTES = new ParseField("attributes");
@ -77,7 +77,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
if (request.hasParam("tokenizer")) {
analyzeRequest.tokenizer(request.param("tokenizer"));
}
for (String filter : request.paramAsStringArray("filter", request.paramAsStringArray("token_filter", Strings.EMPTY_ARRAY))) {
for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) {
analyzeRequest.addTokenFilter(filter);
}
for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) {
@ -144,7 +144,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
analyzeRequest.addTokenFilter(parser.map());
} else {
throw new IllegalArgumentException(currentFieldName
+ " array element should contain token_filter's name or setting");
+ " array element should contain filter's name or setting");
}
}
} else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS)

View File

@ -181,6 +181,21 @@ public class RestAnalyzeActionTests extends ESTestCase {
assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
}
content = XContentFactory.jsonBuilder()
.startObject()
.field("text", "THIS IS A TEST")
.field("tokenizer", "keyword")
.array("token_filter", "lowercase")
.endObject().bytes();
analyzeRequest = new AnalyzeRequest("for test");
try {
RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
} catch (Exception e) {
assertThat(e, instanceOf(IllegalArgumentException.class));
assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]"));
}
}
}

View File

@ -43,13 +43,13 @@ curl -XGET 'localhost:9200/_analyze' -d '
curl -XGET 'localhost:9200/_analyze' -d '
{
"tokenizer" : "keyword",
"token_filter" : ["lowercase"],
"filter" : ["lowercase"],
"char_filter" : ["html_strip"],
"text" : "this is a <b>test</b>"
}'
--------------------------------------------------
deprecated[5.0.0, Use `filter`/`token_filter`/`char_filter` instead of `filters`/`token_filters`/`char_filters`]
deprecated[5.0.0, Use `filter`/`char_filter` instead of `filters`/`char_filters`; the `token_filters` parameter has been removed]
Custom tokenizers, token filters, and character filters can be specified in the request body as follows:
@ -112,7 +112,7 @@ provided it doesn't start with `{` :
[source,js]
--------------------------------------------------
curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filter=lowercase&char_filter=html_strip' -d 'this is a <b>test</b>'
curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&char_filter=html_strip' -d 'this is a <b>test</b>'
--------------------------------------------------
=== Explain Analyze

View File

@ -67,8 +67,8 @@ removed in Elasticsearch 6.0.0.
==== Analyze API changes
The deprecated `filters`/`token_filters`/`char_filters` parameter has been
renamed `filter`/`token_filter`/`char_filter`.
The deprecated `filters`/`char_filters` parameter has been
renamed `filter`/`char_filter`, and the `token_filters` parameter has been removed.
==== `DELETE /_query` endpoint removed