[Docs] Extend common-grams-tokenfilter doctest example (#36807)

Adds an example request using the `_analyze` API and the expected response.
Christoph Büscher 2018-12-19 09:49:23 +01:00 committed by GitHub
parent 1e9d0bb01e
commit 132ccbec2f


@@ -58,12 +58,12 @@ PUT /common_grams_example
"filter": {
"common_grams": {
"type": "common_grams",
"common_words": ["a", "an", "the"]
"common_words": ["the", "is", "a"]
},
"common_grams_query": {
"type": "common_grams",
"query_mode": true,
"common_words": ["a", "an", "the"]
"common_words": ["the", "is", "a"]
}
}
}
@@ -71,3 +71,101 @@ PUT /common_grams_example
}
--------------------------------------------------
// CONSOLE
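
Instead of listing the common words inline, the filter's `common_words_path`
parameter can load them from a file. A minimal sketch, assuming a hypothetical
word list at `analysis/common_words.txt` (relative to the node's `config`
directory, one word per line):

[source,js]
--------------------------------------------------
PUT /common_grams_example2
{
  "settings": {
    "analysis": {
      "filter": {
        "common_grams": {
          "type": "common_grams",
          "common_words_path": "analysis/common_words.txt"
        }
      }
    }
  }
}
--------------------------------------------------
// CONSOLE
// TEST[skip:requires a common words file on the node]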
You can see the output of the `index_grams` analyzer by using, for example, the `_analyze` endpoint:
[source,js]
--------------------------------------------------
POST /common_grams_example/_analyze
{
  "analyzer" : "index_grams",
  "text" : "the quick brown is a fox"
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
And the response will be:
[source,js]
--------------------------------------------------
{
  "tokens" : [
    {
      "token" : "the",
      "start_offset" : 0,
      "end_offset" : 3,
      "type" : "word",
      "position" : 0
    },
    {
      "token" : "the_quick",
      "start_offset" : 0,
      "end_offset" : 9,
      "type" : "gram",
      "position" : 0,
      "positionLength" : 2
    },
    {
      "token" : "quick",
      "start_offset" : 4,
      "end_offset" : 9,
      "type" : "word",
      "position" : 1
    },
    {
      "token" : "brown",
      "start_offset" : 10,
      "end_offset" : 15,
      "type" : "word",
      "position" : 2
    },
    {
      "token" : "brown_is",
      "start_offset" : 10,
      "end_offset" : 18,
      "type" : "gram",
      "position" : 2,
      "positionLength" : 2
    },
    {
      "token" : "is",
      "start_offset" : 16,
      "end_offset" : 18,
      "type" : "word",
      "position" : 3
    },
    {
      "token" : "is_a",
      "start_offset" : 16,
      "end_offset" : 20,
      "type" : "gram",
      "position" : 3,
      "positionLength" : 2
    },
    {
      "token" : "a",
      "start_offset" : 19,
      "end_offset" : 20,
      "type" : "word",
      "position" : 4
    },
    {
      "token" : "a_fox",
      "start_offset" : 19,
      "end_offset" : 24,
      "type" : "gram",
      "position" : 4,
      "positionLength" : 2
    },
    {
      "token" : "fox",
      "start_offset" : 21,
      "end_offset" : 24,
      "type" : "word",
      "position" : 5
    }
  ]
}
--------------------------------------------------
// TESTRESPONSE
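
Note that each `gram` token carries `"positionLength" : 2` because a bigram
spans two positions. For comparison, you can run the same text through the
analyzer that uses the `common_grams_query` filter; a sketch, assuming the
example index defines a `search_grams` analyzer wired to that filter (the
analyzer name is assumed here):

[source,js]
--------------------------------------------------
POST /common_grams_example/_analyze
{
  "analyzer" : "search_grams",
  "text" : "the quick brown is a fox"
}
--------------------------------------------------
// CONSOLE
// TEST[continued]

Because `query_mode` is enabled, single words that are part of a bigram are
dropped, so this request should return only the tokens `the_quick`,
`brown_is`, `is_a` and `a_fox`.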