Merge pull request #20285 from johtani/fix/remove_token_filter_param_in_analyze_api
Remove `token_filter` in _analyze API
commit c4759bcc02
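For context, a rough before/after sketch of the affected request, based on the docs changes in this commit (host and analysis chain are illustrative):

[source,js]
--------------------------------------------------
# Before this commit: `token_filter` was accepted as a deprecated alias of `filter`.
curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filter=lowercase' -d 'this is a test'

# After this commit: only `filter` is accepted; `token_filter` is no longer recognized.
curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase' -d 'this is a test'
--------------------------------------------------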
RestAnalyzeAction.java

@@ -50,7 +50,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
         public static final ParseField TEXT = new ParseField("text");
         public static final ParseField FIELD = new ParseField("field");
         public static final ParseField TOKENIZER = new ParseField("tokenizer");
-        public static final ParseField TOKEN_FILTERS = new ParseField("filter", "token_filter");
+        public static final ParseField TOKEN_FILTERS = new ParseField("filter");
         public static final ParseField CHAR_FILTERS = new ParseField("char_filter");
         public static final ParseField EXPLAIN = new ParseField("explain");
         public static final ParseField ATTRIBUTES = new ParseField("attributes");

@@ -77,7 +77,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
         if (request.hasParam("tokenizer")) {
             analyzeRequest.tokenizer(request.param("tokenizer"));
         }
-        for (String filter : request.paramAsStringArray("filter", request.paramAsStringArray("token_filter", Strings.EMPTY_ARRAY))) {
+        for (String filter : request.paramAsStringArray("filter", Strings.EMPTY_ARRAY)) {
             analyzeRequest.addTokenFilter(filter);
         }
         for (String charFilter : request.paramAsStringArray("char_filter", Strings.EMPTY_ARRAY)) {

@@ -144,7 +144,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
                     analyzeRequest.addTokenFilter(parser.map());
                 } else {
                     throw new IllegalArgumentException(currentFieldName
-                        + " array element should contain token_filter's name or setting");
+                        + " array element should contain filter's name or setting");
                 }
             }
         } else if (parseFieldMatcher.match(currentFieldName, Fields.CHAR_FILTERS)
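A note on the ParseField change above: the extra constructor argument ("token_filter") declared a deprecated alternative name for the `filter` field, so removing it, together with the `token_filter` fallback in the query-string handling, removes the alias entirely. A self-contained sketch of that idea, using a hypothetical stand-in class rather than Elasticsearch's actual ParseField:

import java.util.Arrays;
import java.util.List;

// Hypothetical stand-in for a ParseField-style declaration: one preferred name
// plus zero or more deprecated aliases. Not Elasticsearch's implementation.
public final class ParseFieldSketch {

    private final String preferredName;
    private final List<String> deprecatedNames;

    ParseFieldSketch(String preferredName, String... deprecatedNames) {
        this.preferredName = preferredName;
        this.deprecatedNames = Arrays.asList(deprecatedNames);
    }

    // The preferred name always matches; a deprecated alias matches only while
    // it is still declared, typically with a deprecation warning.
    boolean match(String fieldName) {
        if (preferredName.equals(fieldName)) {
            return true;
        }
        if (deprecatedNames.contains(fieldName)) {
            System.err.println("deprecated parameter [" + fieldName + "], use [" + preferredName + "] instead");
            return true;
        }
        return false;
    }

    public static void main(String[] args) {
        ParseFieldSketch before = new ParseFieldSketch("filter", "token_filter"); // pre-commit declaration
        ParseFieldSketch after  = new ParseFieldSketch("filter");                 // post-commit declaration
        System.out.println(before.match("token_filter")); // true (alias still accepted, with a warning)
        System.out.println(after.match("token_filter"));  // false (caller reports an unknown parameter)
    }
}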
RestAnalyzeActionTests.java

@@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.test.ESTestCase;

 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.startsWith;

@@ -90,14 +89,10 @@ public class RestAnalyzeActionTests extends ESTestCase {

     public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception {
         AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
-
-        try {
-            RestAnalyzeAction.buildFromContent(new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-            fail("shouldn't get here");
-        } catch (Exception e) {
-            assertThat(e, instanceOf(IllegalArgumentException.class));
-            assertThat(e.getMessage(), equalTo("Failed to parse request body"));
-        }
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(
+                new BytesArray("{invalid_json}"), analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), equalTo("Failed to parse request body"));
     }

     public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception {

@@ -107,14 +102,9 @@ public class RestAnalyzeActionTests extends ESTestCase {
             .field("text", "THIS IS A TEST")
             .field("unknown", "keyword")
             .endObject().bytes();
-
-        try {
-            RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-            fail("shouldn't get here");
-        } catch (Exception e) {
-            assertThat(e, instanceOf(IllegalArgumentException.class));
-            assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
-        }
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
     }

     public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception {

@@ -123,64 +113,57 @@ public class RestAnalyzeActionTests extends ESTestCase {
             .startObject()
             .field("explain", "fals")
             .endObject().bytes();
-        try {
-            RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-            fail("shouldn't get here");
-        } catch (Exception e) {
-            assertThat(e, instanceOf(IllegalArgumentException.class));
-            assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
-        }
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'"));
     }

     public void testDeprecatedParamException() throws Exception {
-        BytesReference content = XContentFactory.jsonBuilder()
-            .startObject()
-            .field("text", "THIS IS A TEST")
-            .field("tokenizer", "keyword")
-            .array("filters", "lowercase")
-            .endObject().bytes();
-
-        AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test");
-
-        try {
-            RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-        } catch (Exception e) {
-            assertThat(e, instanceOf(IllegalArgumentException.class));
-            assertThat(e.getMessage(), startsWith("Unknown parameter [filters]"));
-        }
-
-        content = XContentFactory.jsonBuilder()
-            .startObject()
-            .field("text", "THIS IS A TEST")
-            .field("tokenizer", "keyword")
-            .array("token_filters", "lowercase")
-            .endObject().bytes();
-
-        analyzeRequest = new AnalyzeRequest("for test");
-
-        try {
-            RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-        } catch (Exception e) {
-            assertThat(e, instanceOf(IllegalArgumentException.class));
-            assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]"));
-        }
-
-        content = XContentFactory.jsonBuilder()
-            .startObject()
-            .field("text", "THIS IS A TEST")
-            .field("tokenizer", "keyword")
-            .array("char_filters", "lowercase")
-            .endObject().bytes();
-
-        analyzeRequest = new AnalyzeRequest("for test");
-
-        try {
-            RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY));
-        } catch (Exception e) {
-            assertThat(e, instanceOf(IllegalArgumentException.class));
-            assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
-        }
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(
+                XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("text", "THIS IS A TEST")
+                    .field("tokenizer", "keyword")
+                    .array("filters", "lowercase")
+                    .endObject().bytes(),
+                new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), startsWith("Unknown parameter [filters]"));
+
+        e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(
+                XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("text", "THIS IS A TEST")
+                    .field("tokenizer", "keyword")
+                    .array("token_filters", "lowercase")
+                    .endObject().bytes(),
+                new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]"));
+
+        e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(
+                XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("text", "THIS IS A TEST")
+                    .field("tokenizer", "keyword")
+                    .array("char_filters", "lowercase")
+                    .endObject().bytes(),
+                new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]"));
+
+        e = expectThrows(IllegalArgumentException.class,
+            () -> RestAnalyzeAction.buildFromContent(
+                XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("text", "THIS IS A TEST")
+                    .field("tokenizer", "keyword")
+                    .array("token_filter", "lowercase")
+                    .endObject().bytes()
+                , new AnalyzeRequest("for test"), new ParseFieldMatcher(Settings.EMPTY)));
+        assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]"));
     }

 }
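The test changes above also swap the try/fail/catch idiom for the test framework's expectThrows helper, which returns the caught exception for further assertions. A minimal standalone sketch of that pattern (a hypothetical helper written here for illustration, not the framework's own implementation):

// Minimal standalone sketch of the expectThrows idiom used in the rewritten tests.
public final class ExpectThrowsSketch {

    @FunctionalInterface
    interface ThrowingRunnable {
        void run() throws Exception;
    }

    // Runs the given code, asserts that it throws the expected exception type,
    // and returns the exception so the caller can assert on its message.
    static <T extends Exception> T expectThrows(Class<T> expectedType, ThrowingRunnable code) {
        try {
            code.run();
        } catch (Exception e) {
            if (expectedType.isInstance(e)) {
                return expectedType.cast(e);
            }
            throw new AssertionError("expected " + expectedType.getSimpleName() + " but got " + e, e);
        }
        throw new AssertionError("expected " + expectedType.getSimpleName() + " but nothing was thrown");
    }

    public static void main(String[] args) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> { throw new IllegalArgumentException("Unknown parameter [token_filter]"); });
        System.out.println("caught: " + e.getMessage());
    }
}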
Analyze API docs

@@ -43,13 +43,13 @@ curl -XGET 'localhost:9200/_analyze' -d '
 curl -XGET 'localhost:9200/_analyze' -d '
 {
   "tokenizer" : "keyword",
-  "token_filter" : ["lowercase"],
+  "filter" : ["lowercase"],
   "char_filter" : ["html_strip"],
   "text" : "this is a <b>test</b>"
 }'
 --------------------------------------------------

-deprecated[5.0.0, Use `filter`/`token_filter`/`char_filter` instead of `filters`/`token_filters`/`char_filters`]
+deprecated[5.0.0, Use `filter`/`char_filter` instead of `filters`/`char_filters` and `token_filters` has been removed]

 Custom tokenizers, token filters, and character filters can be specified in the request body as follows:

@@ -112,7 +112,7 @@ provided it doesn't start with `{` :

 [source,js]
 --------------------------------------------------
-curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&token_filter=lowercase&char_filter=html_strip' -d 'this is a <b>test</b>'
+curl -XGET 'localhost:9200/_analyze?tokenizer=keyword&filter=lowercase&char_filter=html_strip' -d 'this is a <b>test</b>'
 --------------------------------------------------

 === Explain Analyze
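For the request-body form referenced above ("Custom tokenizers, token filters, and character filters can be specified in the request body"), a rough sketch of what such a request looks like after this change; the specific tokenizer and filter settings are illustrative and not taken from the diff:

[source,js]
--------------------------------------------------
curl -XGET 'localhost:9200/_analyze' -d '
{
  "tokenizer" : "whitespace",
  "filter" : ["lowercase", {"type": "stop", "stopwords": ["a", "is", "this"]}],
  "text" : "this is a test"
}'
--------------------------------------------------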
Migration docs

@@ -67,8 +67,8 @@ removed in Elasticsearch 6.0.0.

 ==== Analyze API changes

-The deprecated `filters`/`token_filters`/`char_filters` parameter has been
-renamed `filter`/`token_filter`/`char_filter`.
+The `filters` and `char_filters` parameters have been renamed `filter` and `char_filter`.
+The `token_filters` parameter has been removed. Use `filter` instead.

 ==== `DELETE /_query` endpoint removed
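Illustrative before/after request bodies for the renames described in the migration note (a sketch only; `"..."` stands for the text to analyze):

[source,js]
--------------------------------------------------
# rejected in 5.0 ("Unknown parameter"):
{ "tokenizer" : "keyword", "filters" : ["lowercase"], "char_filters" : ["html_strip"], "text" : "..." }

# accepted in 5.0:
{ "tokenizer" : "keyword", "filter" : ["lowercase"], "char_filter" : ["html_strip"], "text" : "..." }
--------------------------------------------------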