mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-17 10:25:15 +00:00)
Remove token_filter in _analyze API

Remove the param and change docs. Closes #20283
parent 0e8a43e826
commit 3d9f8ed764
@@ -50,7 +50,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
         public static final ParseField TEXT = new ParseField("text");
         public static final ParseField FIELD = new ParseField("field");
         public static final ParseField TOKENIZER = new ParseField("tokenizer");
-        public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter");
+        public static final ParseField TOKEN_FILTERS = new ParseField("filter");
         public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
         public static final ParseField EXPLAIN = new ParseField("explain");
         public static final ParseField ATTRIBUTES = new ParseField("attributes");
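The second constructor argument of ParseField registers an alternative (deprecated) name for the field, so dropping it is what turns a `token_filter` key in the request body into an unknown parameter. A rough standalone approximation of that matching behavior (an illustrative sketch, not the actual org.elasticsearch ParseField class):

import java.util.Arrays;
import java.util.List;

class ParseFieldSketch {
    private final String name;
    private final List<String> deprecatedNames;

    ParseFieldSketch(String name, String... deprecatedNames) {
        this.name = name;
        this.deprecatedNames = Arrays.asList(deprecatedNames);
    }

    // Accepts the primary name plus any registered alternative names.
    boolean match(String fieldName) {
        return name.equals(fieldName) || deprecatedNames.contains(fieldName);
    }

    public static void main(String[] args) {
        ParseFieldSketch before = new ParseFieldSketch("filter", "token_filter");
        ParseFieldSketch after = new ParseFieldSketch("filter");
        System.out.println(before.match("token_filter")); // true  -> alias still parsed
        System.out.println(after.match("token_filter"));  // false -> "Unknown parameter [token_filter]"
    }
}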
@@ -77,7 +77,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
         if (request.hasParam("tokenizer")) {
             analyzeRequest.tokenizer(request.param("tokenizer"));
         }
-        for (String filter : request.paramAsStringArray("filter", request.paramAsStringArray("token_filter", Strings.EMPTY_ARRAY))) {
+        for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) {
             analyzeRequest.addTokenFilter(filter);
         }
         for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) {
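On the URL-parameter path, the removed call was a nested default: `token_filter` was only consulted when `filter` was absent. A minimal self-contained sketch of that fallback pattern, using a plain Map as a hypothetical stand-in for the real RestRequest:

import java.util.Map;

class ParamFallbackSketch {
    static final String[] EMPTY = new String[0];

    // Hypothetical stand-in for RestRequest#paramAsStringArray(name, defaultValue).
    static String[] paramAsStringArray(Map<String, String[]> params, String name, String[] defaultValue) {
        return params.getOrDefault(name, defaultValue);
    }

    public static void main(String[] args) {
        // A request still using the old URL parameter, e.g. ...?tokenizer=keyword&token_filter=lowercase
        Map<String, String[]> params = Map.of("token_filter", new String[] {"lowercase"});

        // Before this commit: "token_filter" served as the fallback when "filter" was missing.
        String[] before = paramAsStringArray(params, "filter",
                paramAsStringArray(params, "token_filter", EMPTY));
        // After this commit: only "filter" is read; "token_filter" on the URL is ignored.
        String[] after = paramAsStringArray(params, "filter", EMPTY);

        System.out.println(before.length); // 1
        System.out.println(after.length);  // 0
    }
}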
@@ -144,7 +144,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
                         analyzeRequest.addTokenFilter(parser.map());
                     } else {
                         throw new IllegalArgumentException(currentFieldName
-                            + " array element should contain token_filter's name or setting");
+                            + " array element should contain filter's name or setting");
                     }
                 }
             } else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS)
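In the body parser, each element of the `filter` array may be either a filter name or a settings object; anything else hits the exception whose message is reworded above. A loose, hypothetical sketch of that per-element branching (not the actual buildFromContent code):

import java.util.List;
import java.util.Map;

class FilterArraySketch {
    // Hypothetical stand-in for the per-element handling: a String is treated as a named
    // filter, a Map as a custom filter definition, anything else is rejected.
    static void addTokenFilter(Object element) {
        if (element instanceof String) {
            System.out.println("named filter: " + element);
        } else if (element instanceof Map) {
            System.out.println("custom filter settings: " + element);
        } else {
            throw new IllegalArgumentException(
                    "filter array element should contain filter's name or setting");
        }
    }

    public static void main(String[] args) {
        addTokenFilter("lowercase");
        addTokenFilter(Map.of("type", "stop", "stopwords", List.of("a", "is")));
    }
}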
@@ -181,6 +181,21 @@ public class RestAnalyzeActionTests extends ESTestCase {
             assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
         }

+        content = XContentFactory.jsonBuilder()
+            .startObject()
+            .field("text", "THIS IS A TEST")
+            .field("tokenizer", "keyword")
+            .array("token_filter", "lowercase")
+            .endObject().bytes();
+
+        analyzeRequest = new AnalyzeRequest("for test");
+
+        try {
+            RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
+        } catch (Exception e) {
+            assertThat(e, instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]"));
+        }
     }

 }
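For contrast, the same request built with the surviving `filter` key is the form the new parser accepts. A hedged companion snippet that reuses only the calls already shown in the test hunk above (imports and the surrounding test method omitted, as in the diff):

// Same builder calls as the test above, but with "filter" instead of the removed "token_filter";
// under the new parser this is expected to succeed rather than throw.
content = XContentFactory.jsonBuilder()
    .startObject()
    .field("text", "THIS IS A TEST")
    .field("tokenizer", "keyword")
    .array("filter", "lowercase")
    .endObject().bytes();

analyzeRequest = new AnalyzeRequest("for test");
RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));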
@@ -43,13 +43,13 @@ curl -XGET 'localhost:9200/_analyze' -d '
 curl -XGET 'localhost:9200/_analyze' -d '
 {
   "tokenizer" : "keyword",
-  "token_filter" : ["lowercase"],
+  "filter" : ["lowercase"],
   "char_filter" : ["html_strip"],
   "text" : "this is a <b>test</b>"
 }'
 --------------------------------------------------

-deprecated[5.0.0, Use `filter`/`token_filter`/`char_filter` instead of `filters`/`token_filters`/`char_filters`]
+deprecated[5.0.0, Use `filter`/`char_filter` instead of `filters`/`char_filters` and `token_filters` has been removed]

 Custom tokenizers, token filters, and character filters can be specified in the request body as follows:

@@ -112,7 +112,7 @@ provided it doesn't start with `{` :

 [source,js]
 --------------------------------------------------
-curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filter=lowercase&char_filter=html_strip' -d 'this is a <b>test</b>'
+curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&char_filter=html_strip' -d 'this is a <b>test</b>'
 --------------------------------------------------

 === Explain Analyze
@@ -67,8 +67,8 @@ removed in Elasticsearch 6.0.0.

 ==== Analyze API changes

-The deprecated `filters`/`token_filters`/`char_filters` parameter has been
-renamed `filter`/`token_filter`/`char_filter`.
+The deprecated `filters`/`char_filters` parameter has been
+renamed `filter`/`char_filter` and `token_filters` parameter has been removed.

 ==== `DELETE /_query` endpoint removed
