parent 606b7ea139
commit b302b09b85
@@ -45,23 +45,23 @@ Let's try and index some twitter like information. First, let's index some tweet
 ----
 curl -XPUT 'http://localhost:9200/twitter/_doc/1?pretty' -H 'Content-Type: application/json' -d '
 {
-"user": "kimchy",
-"post_date": "2009-11-15T13:12:00",
-"message": "Trying out Elasticsearch, so far so good?"
+"user": "kimchy",
+"post_date": "2009-11-15T13:12:00",
+"message": "Trying out Elasticsearch, so far so good?"
 }'

 curl -XPUT 'http://localhost:9200/twitter/_doc/2?pretty' -H 'Content-Type: application/json' -d '
 {
-"user": "kimchy",
-"post_date": "2009-11-15T14:12:12",
-"message": "Another tweet, will it be indexed?"
+"user": "kimchy",
+"post_date": "2009-11-15T14:12:12",
+"message": "Another tweet, will it be indexed?"
 }'

 curl -XPUT 'http://localhost:9200/twitter/_doc/3?pretty' -H 'Content-Type: application/json' -d '
 {
-"user": "elastic",
-"post_date": "2010-01-15T01:46:38",
-"message": "Building the site, should be kewl"
+"user": "elastic",
+"post_date": "2010-01-15T01:46:38",
+"message": "Building the site, should be kewl"
 }'
 ----
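As a quick sanity check (not part of the change above; it assumes a local node on port 9200, as in the snippets), each tweet can be fetched back by ID:

----
curl -XGET 'http://localhost:9200/twitter/_doc/1?pretty'
----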
@@ -87,9 +87,9 @@ We can also use the JSON query language Elasticsearch provides instead of a quer
 ----
 curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
 {
-"query" : {
-"match" : { "user": "kimchy" }
-}
+"query" : {
+"match" : { "user": "kimchy" }
+}
 }'
 ----

@@ -98,9 +98,9 @@ Just for kicks, let's get all the documents stored (we should see the tweet from
 ----
 curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
 {
-"query" : {
-"match_all" : {}
-}
+"query" : {
+"match_all" : {}
+}
 }'
 ----

@@ -109,11 +109,11 @@ We can also do range search (the `post_date` was automatically identified as dat
 ----
 curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
 {
-"query" : {
-"range" : {
-"post_date" : { "from" : "2009-11-15T13:00:00", "to" : "2009-11-15T14:00:00" }
-}
+"query" : {
+"range" : {
+"post_date" : { "from" : "2009-11-15T13:00:00", "to" : "2009-11-15T14:00:00" }
+}
 }
 }'
 ----
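The range example relies on `post_date` having been detected as a `date` field. A minimal way to confirm the detected mapping (an aside, not part of the change; same local-node assumption):

----
curl -XGET 'http://localhost:9200/twitter/_mapping?pretty'
----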
@@ -130,16 +130,16 @@ Another way to define our simple twitter system is to have a different index per
 ----
 curl -XPUT 'http://localhost:9200/kimchy/_doc/1?pretty' -H 'Content-Type: application/json' -d '
 {
-"user": "kimchy",
-"post_date": "2009-11-15T13:12:00",
-"message": "Trying out Elasticsearch, so far so good?"
+"user": "kimchy",
+"post_date": "2009-11-15T13:12:00",
+"message": "Trying out Elasticsearch, so far so good?"
 }'

 curl -XPUT 'http://localhost:9200/kimchy/_doc/2?pretty' -H 'Content-Type: application/json' -d '
 {
-"user": "kimchy",
-"post_date": "2009-11-15T14:12:12",
-"message": "Another tweet, will it be indexed?"
+"user": "kimchy",
+"post_date": "2009-11-15T14:12:12",
+"message": "Another tweet, will it be indexed?"
 }'
 ----

@@ -150,10 +150,10 @@ Complete control on the index level is allowed. As an example, in the above case
 ----
 curl -XPUT http://localhost:9200/another_user?pretty -H 'Content-Type: application/json' -d '
 {
-"settings" : {
-"index.number_of_shards" : 2,
-"index.number_of_replicas" : 1
-}
+"settings" : {
+"index.number_of_shards" : 2,
+"index.number_of_replicas" : 1
+}
 }'
 ----
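To verify the shard and replica counts on the new index (a minimal check, not part of the change; it assumes `another_user` was created as above):

----
curl -XGET 'http://localhost:9200/another_user/_settings?pretty'
----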
@@ -163,9 +163,9 @@ index (twitter user), for example:
 ----
 curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -H 'Content-Type: application/json' -d '
 {
-"query" : {
-"match_all" : {}
-}
+"query" : {
+"match_all" : {}
+}
 }'
 ----

@@ -174,9 +174,9 @@ Or on all the indices:
 ----
 curl -XGET 'http://localhost:9200/_search?pretty=true' -H 'Content-Type: application/json' -d '
 {
-"query" : {
-"match_all" : {}
-}
+"query" : {
+"match_all" : {}
+}
 }'
 ----
@@ -145,16 +145,16 @@ The following <<indices-create-index,create index API>> request uses the
 ----
 PUT sample_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"my_sample_analyzer" : {
-"tokenizer" : "standard",
-"filter" : ["sample"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"my_sample_analyzer": {
+"tokenizer": "standard",
+"filter": [ "sample" ]
+}
+}
 }
 }
 }
 ----
 // TEST[skip: REMOVE THIS COMMENT.]

@@ -212,22 +212,22 @@ For example, the following request creates a custom `sample` filter with
 ----
 PUT sample_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"my_custom_analyzer" : {
-"tokenizer" : "whitespace",
-"filter" : ["my_custom_sample_token_filter"]
-}
-},
-"filter" : {
-"my_custom_sample_token_filter" : {
-"type" : "sample",
-"foo" : true
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"my_custom_analyzer": {
+"tokenizer": "whitespace",
+"filter": [ "my_custom_sample_token_filter" ]
+}
+},
+"filter": {
+"my_custom_sample_token_filter": {
+"type": "sample",
+"foo": true
+}
+}
 }
 }
 }
 ----
 // TEST[skip: REMOVE THIS COMMENT.]
@@ -77,15 +77,15 @@ apostrophe token filter to configure a new
 --------------------------------------------------
 PUT /apostrophe_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"standard_apostrophe" : {
-"tokenizer" : "standard",
-"filter" : ["apostrophe"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"standard_apostrophe": {
+"tokenizer": "standard",
+"filter": [ "apostrophe" ]
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -83,16 +83,16 @@ The following <<indices-create-index,create index API>> request uses the
 --------------------------------------------------
 PUT /asciifold_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"standard_asciifolding" : {
-"tokenizer" : "standard",
-"filter" : ["asciifolding"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"standard_asciifolding": {
+"tokenizer": "standard",
+"filter": [ "asciifolding" ]
+}
+}
 }
 }
 }
 --------------------------------------------------

@@ -118,21 +118,21 @@ For example, the following request creates a custom `asciifolding` filter with
 --------------------------------------------------
 PUT /asciifold_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"standard_asciifolding" : {
-"tokenizer" : "standard",
-"filter" : ["my_ascii_folding"]
-}
-},
-"filter" : {
-"my_ascii_folding" : {
-"type" : "asciifolding",
-"preserve_original" : true
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"standard_asciifolding": {
+"tokenizer": "standard",
+"filter": [ "my_ascii_folding" ]
+}
+},
+"filter": {
+"my_ascii_folding": {
+"type": "asciifolding",
+"preserve_original": true
+}
+}
 }
 }
 }
 --------------------------------------------------
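A quick way to exercise the `standard_asciifolding` analyzer configured above is the `_analyze` API (an illustrative request, not part of the change; the sample text is arbitrary):

--------------------------------------------------
GET /asciifold_example/_analyze
{
  "analyzer": "standard_asciifolding",
  "text": "açaí à la carte"
}
--------------------------------------------------

With `preserve_original` set to `true`, the response contains both the folded and the original tokens.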
@@ -126,16 +126,16 @@ CJK bigram token filter to configure a new
 --------------------------------------------------
 PUT /cjk_bigram_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"standard_cjk_bigram" : {
-"tokenizer" : "standard",
-"filter" : ["cjk_bigram"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"standard_cjk_bigram": {
+"tokenizer": "standard",
+"filter": [ "cjk_bigram" ]
+}
+}
 }
 }
 }
 --------------------------------------------------

@@ -176,26 +176,26 @@ parameters.
 --------------------------------------------------
 PUT /cjk_bigram_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"han_bigrams" : {
-"tokenizer" : "standard",
-"filter" : ["han_bigrams_filter"]
-}
-},
-"filter" : {
-"han_bigrams_filter" : {
-"type" : "cjk_bigram",
-"ignored_scripts": [
-"hangul",
-"hiragana",
-"katakana"
-],
-"output_unigrams" : true
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"han_bigrams": {
+"tokenizer": "standard",
+"filter": [ "han_bigrams_filter" ]
+}
+},
+"filter": {
+"han_bigrams_filter": {
+"type": "cjk_bigram",
+"ignored_scripts": [
+"hangul",
+"hiragana",
+"katakana"
+],
+"output_unigrams": true
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -69,15 +69,15 @@ CJK width token filter to configure a new
 --------------------------------------------------
 PUT /cjk_width_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"standard_cjk_width" : {
-"tokenizer" : "standard",
-"filter" : ["cjk_width"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"standard_cjk_width": {
+"tokenizer": "standard",
+"filter": [ "cjk_width" ]
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -132,16 +132,16 @@ classic token filter to configure a new
 --------------------------------------------------
 PUT /classic_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"classic_analyzer" : {
-"tokenizer" : "classic",
-"filter" : ["classic"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"classic_analyzer": {
+"tokenizer": "classic",
+"filter": [ "classic" ]
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -126,22 +126,22 @@ The following <<indices-create-index,create index API>> request uses the
 --------------------------------------------------
 PUT /common_grams_example
 {
-"settings": {
-"analysis": {
-"analyzer": {
-"index_grams": {
-"tokenizer": "whitespace",
-"filter": ["common_grams"]
-}
-},
-"filter": {
-"common_grams": {
-"type": "common_grams",
-"common_words": ["a", "is", "the"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"index_grams": {
+"tokenizer": "whitespace",
+"filter": [ "common_grams" ]
+}
+},
+"filter": {
+"common_grams": {
+"type": "common_grams",
+"common_words": [ "a", "is", "the" ]
+}
+}
 }
 }
 }
 --------------------------------------------------

@@ -206,23 +206,23 @@ For example, the following request creates a custom `common_grams` filter with
 --------------------------------------------------
 PUT /common_grams_example
 {
-"settings": {
-"analysis": {
-"analyzer": {
-"index_grams": {
-"tokenizer": "whitespace",
-"filter": ["common_grams_query"]
-}
-},
-"filter": {
-"common_grams_query": {
-"type": "common_grams",
-"common_words": ["a", "is", "the"],
-"ignore_case": true,
-"query_mode": true
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"index_grams": {
+"tokenizer": "whitespace",
+"filter": [ "common_grams_query" ]
+}
+},
+"filter": {
+"common_grams_query": {
+"type": "common_grams",
+"common_words": [ "a", "is", "the" ],
+"ignore_case": true,
+"query_mode": true
+}
+}
 }
 }
 }
 --------------------------------------------------
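To see the common-word grams the `index_grams` analyzer produces (an illustrative check, not part of the change; the sample text is arbitrary):

--------------------------------------------------
GET /common_grams_example/_analyze
{
  "analyzer": "index_grams",
  "text": "the quick fox is brown"
}
--------------------------------------------------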
@@ -75,15 +75,15 @@ The following <<indices-create-index,create index API>> request uses the
 --------------------------------------------------
 PUT /decimal_digit_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"whitespace_decimal_digit" : {
-"tokenizer" : "whitespace",
-"filter" : ["decimal_digit"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"whitespace_decimal_digit": {
+"tokenizer": "whitespace",
+"filter": [ "decimal_digit" ]
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -96,16 +96,16 @@ The following <<indices-create-index,create index API>> request uses the
 --------------------------------------------------
 PUT /elision_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"whitespace_elision" : {
-"tokenizer" : "whitespace",
-"filter" : ["elision"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"whitespace_elision": {
+"tokenizer": "whitespace",
+"filter": [ "elision" ]
+}
+}
 }
 }
 }
 --------------------------------------------------

@@ -165,22 +165,22 @@ and `j'` elisions:
 --------------------------------------------------
 PUT /elision_case_sensitive_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"default" : {
-"tokenizer" : "whitespace",
-"filter" : ["elision_case_sensitive"]
-}
-},
-"filter" : {
-"elision_case_sensitive" : {
-"type" : "elision",
-"articles" : ["l", "m", "t", "qu", "n", "s", "j"],
-"articles_case": true
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"default": {
+"tokenizer": "whitespace",
+"filter": [ "elision_case_sensitive" ]
+}
+},
+"filter": {
+"elision_case_sensitive": {
+"type": "elision",
+"articles": [ "l", "m", "t", "qu", "n", "s", "j" ],
+"articles_case": true
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -82,16 +82,16 @@ The following <<indices-create-index,create index API>> request uses the
 --------------------------------------------------
 PUT lowercase_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"whitespace_lowercase" : {
-"tokenizer" : "whitespace",
-"filter" : ["lowercase"]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"whitespace_lowercase": {
+"tokenizer": "whitespace",
+"filter": [ "lowercase" ]
+}
+}
 }
 }
 }
 --------------------------------------------------
@@ -36,22 +36,22 @@ You can set it up like:
 --------------------------------------------------
 PUT /multiplexer_example
 {
-"settings" : {
-"analysis" : {
-"analyzer" : {
-"my_analyzer" : {
-"tokenizer" : "standard",
-"filter" : [ "my_multiplexer" ]
-}
-},
-"filter" : {
-"my_multiplexer" : {
-"type" : "multiplexer",
-"filters" : [ "lowercase", "lowercase, porter_stem" ]
-}
-}
+"settings": {
+"analysis": {
+"analyzer": {
+"my_analyzer": {
+"tokenizer": "standard",
+"filter": [ "my_multiplexer" ]
+}
+},
+"filter": {
+"my_multiplexer": {
+"type": "multiplexer",
+"filters": [ "lowercase", "lowercase, porter_stem" ]
+}
+}
 }
 }
 }
 --------------------------------------------------
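To confirm that the multiplexer emits both the lowercased and the stemmed token streams (an illustrative request, not part of the change; the sample text is arbitrary):

--------------------------------------------------
GET /multiplexer_example/_analyze
{
  "analyzer": "my_analyzer",
  "text": "jumping dog"
}
--------------------------------------------------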
@ -17,21 +17,21 @@ For example:
|
|||
--------------------------------------------------
|
||||
PUT /my_index
|
||||
{
|
||||
"settings": {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"my_analyzer" : {
|
||||
"tokenizer" : "standard",
|
||||
"filter" : ["lowercase", "my_snow"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"my_snow" : {
|
||||
"type" : "snowball",
|
||||
"language" : "Lovins"
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"my_analyzer": {
|
||||
"tokenizer": "standard",
|
||||
"filter": [ "lowercase", "my_snow" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"my_snow": {
|
||||
"type": "snowball",
|
||||
"language": "Lovins"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -25,22 +25,22 @@ Here is an example:
|
|||
--------------------------------------------------
|
||||
PUT /my_index
|
||||
{
|
||||
"settings": {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"my_analyzer" : {
|
||||
"tokenizer" : "standard",
|
||||
"filter" : ["lowercase", "custom_stems", "porter_stem"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"custom_stems" : {
|
||||
"type" : "stemmer_override",
|
||||
"rules_path" : "analysis/stemmer_override.txt"
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"my_analyzer": {
|
||||
"tokenizer": "standard",
|
||||
"filter": [ "lowercase", "custom_stems", "porter_stem" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"custom_stems": {
|
||||
"type": "stemmer_override",
|
||||
"rules_path": "analysis/stemmer_override.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -57,24 +57,24 @@ You can also define the overrides rules inline:
|
|||
--------------------------------------------------
|
||||
PUT /my_index
|
||||
{
|
||||
"settings": {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"my_analyzer" : {
|
||||
"tokenizer" : "standard",
|
||||
"filter" : ["lowercase", "custom_stems", "porter_stem"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"custom_stems" : {
|
||||
"type" : "stemmer_override",
|
||||
"rules" : [
|
||||
"running, runs => run",
|
||||
"stemmer => stemmer"
|
||||
]
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"my_analyzer": {
|
||||
"tokenizer": "standard",
|
||||
"filter": [ "lowercase", "custom_stems", "porter_stem" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"custom_stems": {
|
||||
"type": "stemmer_override",
|
||||
"rules": [
|
||||
"running, runs => run",
|
||||
"stemmer => stemmer"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -26,24 +26,24 @@ Here is an example:
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"search_synonyms" : {
|
||||
"tokenizer" : "whitespace",
|
||||
"filter" : ["graph_synonyms"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"graph_synonyms" : {
|
||||
"type" : "synonym_graph",
|
||||
"synonyms_path" : "analysis/synonym.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"search_synonyms": {
|
||||
"tokenizer": "whitespace",
|
||||
"filter": [ "graph_synonyms" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"graph_synonyms": {
|
||||
"type": "synonym_graph",
|
||||
"synonyms_path": "analysis/synonym.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -61,29 +61,29 @@ to note that only those synonym rules which cannot get parsed are ignored. For i
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"synonym" : {
|
||||
"tokenizer" : "standard",
|
||||
"filter" : ["my_stop", "synonym_graph"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"my_stop": {
|
||||
"type" : "stop",
|
||||
"stopwords": ["bar"]
|
||||
},
|
||||
"synonym_graph" : {
|
||||
"type" : "synonym_graph",
|
||||
"lenient": true,
|
||||
"synonyms" : ["foo, bar => baz"]
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"synonym": {
|
||||
"tokenizer": "standard",
|
||||
"filter": [ "my_stop", "synonym_graph" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"my_stop": {
|
||||
"type": "stop",
|
||||
"stopwords": [ "bar" ]
|
||||
},
|
||||
"synonym_graph": {
|
||||
"type": "synonym_graph",
|
||||
"lenient": true,
|
||||
"synonyms": [ "foo, bar => baz" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -121,21 +121,21 @@ configuration file (note use of `synonyms` instead of `synonyms_path`):
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"filter" : {
|
||||
"synonym" : {
|
||||
"type" : "synonym_graph",
|
||||
"synonyms" : [
|
||||
"lol, laughing out loud",
|
||||
"universe, cosmos"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"filter": {
|
||||
"synonym": {
|
||||
"type": "synonym_graph",
|
||||
"synonyms": [
|
||||
"lol, laughing out loud",
|
||||
"universe, cosmos"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -152,23 +152,23 @@ declared using `format`:
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"filter" : {
|
||||
"synonym" : {
|
||||
"type" : "synonym_graph",
|
||||
"format" : "wordnet",
|
||||
"synonyms" : [
|
||||
"s(100000001,1,'abstain',v,1,0).",
|
||||
"s(100000001,2,'refrain',v,1,0).",
|
||||
"s(100000001,3,'desist',v,1,0)."
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"filter": {
|
||||
"synonym": {
|
||||
"type": "synonym_graph",
|
||||
"format": "wordnet",
|
||||
"synonyms": [
|
||||
"s(100000001,1,'abstain',v,1,0).",
|
||||
"s(100000001,2,'refrain',v,1,0).",
|
||||
"s(100000001,3,'desist',v,1,0)."
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -12,24 +12,24 @@ Here is an example:
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"synonym" : {
|
||||
"tokenizer" : "whitespace",
|
||||
"filter" : ["synonym"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"synonym" : {
|
||||
"type" : "synonym",
|
||||
"synonyms_path" : "analysis/synonym.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"synonym": {
|
||||
"tokenizer": "whitespace",
|
||||
"filter": [ "synonym" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"synonym": {
|
||||
"type": "synonym",
|
||||
"synonyms_path": "analysis/synonym.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -51,29 +51,29 @@ to note that only those synonym rules which cannot get parsed are ignored. For i
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"synonym" : {
|
||||
"tokenizer" : "standard",
|
||||
"filter" : ["my_stop", "synonym"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"my_stop": {
|
||||
"type" : "stop",
|
||||
"stopwords": ["bar"]
|
||||
},
|
||||
"synonym" : {
|
||||
"type" : "synonym",
|
||||
"lenient": true,
|
||||
"synonyms" : ["foo, bar => baz"]
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"synonym": {
|
||||
"tokenizer": "standard",
|
||||
"filter": [ "my_stop", "synonym" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"my_stop": {
|
||||
"type": "stop",
|
||||
"stopwords": [ "bar" ]
|
||||
},
|
||||
"synonym": {
|
||||
"type": "synonym",
|
||||
"lenient": true,
|
||||
"synonyms": [ "foo, bar => baz" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -112,21 +112,21 @@ configuration file (note use of `synonyms` instead of `synonyms_path`):
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"filter" : {
|
||||
"synonym" : {
|
||||
"type" : "synonym",
|
||||
"synonyms" : [
|
||||
"i-pod, i pod => ipod",
|
||||
"universe, cosmos"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"filter": {
|
||||
"synonym": {
|
||||
"type": "synonym",
|
||||
"synonyms": [
|
||||
"i-pod, i pod => ipod",
|
||||
"universe, cosmos"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -143,23 +143,23 @@ declared using `format`:
|
|||
--------------------------------------------------
|
||||
PUT /test_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"filter" : {
|
||||
"synonym" : {
|
||||
"type" : "synonym",
|
||||
"format" : "wordnet",
|
||||
"synonyms" : [
|
||||
"s(100000001,1,'abstain',v,1,0).",
|
||||
"s(100000001,2,'refrain',v,1,0).",
|
||||
"s(100000001,3,'desist',v,1,0)."
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"filter": {
|
||||
"synonym": {
|
||||
"type": "synonym",
|
||||
"format": "wordnet",
|
||||
"synonyms": [
|
||||
"s(100000001,1,'abstain',v,1,0).",
|
||||
"s(100000001,2,'refrain',v,1,0).",
|
||||
"s(100000001,3,'desist',v,1,0)."
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -92,15 +92,15 @@ The following <<indices-create-index,create index API>> request uses the
|
|||
--------------------------------------------------
|
||||
PUT uppercase_example
|
||||
{
|
||||
"settings" : {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"whitespace_uppercase" : {
|
||||
"tokenizer" : "whitespace",
|
||||
"filter" : ["uppercase"]
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"whitespace_uppercase": {
|
||||
"tokenizer": "whitespace",
|
||||
"filter": [ "uppercase" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -145,26 +145,26 @@ If the follower index is `active`, the API returns the following results:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"follower_indices" : [
|
||||
{
|
||||
"follower_index" : "follower_index",
|
||||
"remote_cluster" : "remote_cluster",
|
||||
"leader_index" : "leader_index",
|
||||
"status" : "active",
|
||||
"parameters" : {
|
||||
"max_read_request_operation_count" : 5120,
|
||||
"max_read_request_size" : "32mb",
|
||||
"max_outstanding_read_requests" : 12,
|
||||
"max_write_request_operation_count" : 5120,
|
||||
"max_write_request_size" : "9223372036854775807b",
|
||||
"max_outstanding_write_requests" : 9,
|
||||
"max_write_buffer_count" : 2147483647,
|
||||
"max_write_buffer_size" : "512mb",
|
||||
"max_retry_delay" : "500ms",
|
||||
"read_poll_timeout" : "1m"
|
||||
}
|
||||
}
|
||||
]
|
||||
"follower_indices": [
|
||||
{
|
||||
"follower_index": "follower_index",
|
||||
"remote_cluster": "remote_cluster",
|
||||
"leader_index": "leader_index",
|
||||
"status": "active",
|
||||
"parameters": {
|
||||
"max_read_request_operation_count": 5120,
|
||||
"max_read_request_size": "32mb",
|
||||
"max_outstanding_read_requests": 12,
|
||||
"max_write_request_operation_count": 5120,
|
||||
"max_write_request_size": "9223372036854775807b",
|
||||
"max_outstanding_write_requests": 9,
|
||||
"max_write_buffer_count": 2147483647,
|
||||
"max_write_buffer_size": "512mb",
|
||||
"max_retry_delay": "500ms",
|
||||
"read_poll_timeout": "1m"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -187,13 +187,13 @@ If the follower index is `paused`, the API returns the following results:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"follower_indices" : [
|
||||
{
|
||||
"follower_index" : "follower_index",
|
||||
"remote_cluster" : "remote_cluster",
|
||||
"leader_index" : "leader_index",
|
||||
"status" : "paused"
|
||||
}
|
||||
]
|
||||
"follower_indices": [
|
||||
{
|
||||
"follower_index": "follower_index",
|
||||
"remote_cluster": "remote_cluster",
|
||||
"leader_index": "leader_index",
|
||||
"status": "paused"
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -185,20 +185,20 @@ This is a short example of a simple reroute API call:
|
|||
--------------------------------------------------
|
||||
POST /_cluster/reroute
|
||||
{
|
||||
"commands" : [
|
||||
{
|
||||
"move" : {
|
||||
"index" : "test", "shard" : 0,
|
||||
"from_node" : "node1", "to_node" : "node2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"allocate_replica" : {
|
||||
"index" : "test", "shard" : 1,
|
||||
"node" : "node3"
|
||||
}
|
||||
}
|
||||
]
|
||||
"commands": [
|
||||
{
|
||||
"move": {
|
||||
"index": "test", "shard": 0,
|
||||
"from_node": "node1", "to_node": "node2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"allocate_replica": {
|
||||
"index": "test", "shard": 1,
|
||||
"node": "node3"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[skip:doc tests run with only a single node]
|
||||
|
|
|
@ -62,9 +62,9 @@ An example of a persistent update:
|
|||
--------------------------------------------------
|
||||
PUT /_cluster/settings
|
||||
{
|
||||
"persistent" : {
|
||||
"indices.recovery.max_bytes_per_sec" : "50mb"
|
||||
}
|
||||
"persistent" : {
|
||||
"indices.recovery.max_bytes_per_sec" : "50mb"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -75,9 +75,9 @@ An example of a transient update:
|
|||
--------------------------------------------------
|
||||
PUT /_cluster/settings?flat_settings=true
|
||||
{
|
||||
"transient" : {
|
||||
"indices.recovery.max_bytes_per_sec" : "20mb"
|
||||
}
|
||||
"transient" : {
|
||||
"indices.recovery.max_bytes_per_sec" : "20mb"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -88,11 +88,11 @@ the transient example:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...
|
||||
"persistent" : { },
|
||||
"transient" : {
|
||||
"indices.recovery.max_bytes_per_sec" : "20mb"
|
||||
}
|
||||
...
|
||||
"persistent" : { },
|
||||
"transient" : {
|
||||
"indices.recovery.max_bytes_per_sec" : "20mb"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/\.\.\./"acknowledged": true,/]
|
||||
|
@ -104,9 +104,9 @@ This example resets a setting:
|
|||
--------------------------------------------------
|
||||
PUT /_cluster/settings
|
||||
{
|
||||
"transient" : {
|
||||
"indices.recovery.max_bytes_per_sec" : null
|
||||
}
|
||||
"transient" : {
|
||||
"indices.recovery.max_bytes_per_sec" : null
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -116,9 +116,9 @@ The response does not include settings that have been reset:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...
|
||||
"persistent" : {},
|
||||
"transient" : {}
|
||||
...
|
||||
"persistent" : {},
|
||||
"transient" : {}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/\.\.\./"acknowledged": true,/]
|
||||
|
@ -131,8 +131,8 @@ all dynamic `indices.recovery` settings:
|
|||
--------------------------------------------------
|
||||
PUT /_cluster/settings
|
||||
{
|
||||
"transient" : {
|
||||
"indices.recovery.*" : null
|
||||
}
|
||||
"transient" : {
|
||||
"indices.recovery.*" : null
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -24,8 +24,8 @@ an initial sequence number and primary term:
|
|||
--------------------------------------------------
|
||||
PUT products/_doc/1567
|
||||
{
|
||||
"product" : "r2d2",
|
||||
"details" : "A resourceful astromech droid"
|
||||
"product" : "r2d2",
|
||||
"details" : "A resourceful astromech droid"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -35,21 +35,22 @@ You can see the assigned sequence number and primary term in the
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_shards" : {
|
||||
"total" : 2,
|
||||
"failed" : 0,
|
||||
"successful" : 1
|
||||
},
|
||||
"_index" : "products",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1567",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 362,
|
||||
"_primary_term" : 2,
|
||||
"result" : "created"
|
||||
"_shards": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"successful": 1
|
||||
},
|
||||
"_index": "products",
|
||||
"_type": "_doc",
|
||||
"_id": "1567",
|
||||
"_version": 1,
|
||||
"_seq_no": 362,
|
||||
"_primary_term": 2,
|
||||
"result": "created"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 2/"_primary_term" : $body._primary_term/]
|
||||
// TESTRESPONSE[s/"_seq_no": 362/"_seq_no": $body._seq_no/]
|
||||
// TESTRESPONSE[s/"_primary_term": 2/"_primary_term": $body._primary_term/]
|
||||
|
||||
|
||||
Elasticsearch keeps tracks of the sequence number and primary term of the last
|
||||
|
@ -68,20 +69,21 @@ returns:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_index" : "products",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1567",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 362,
|
||||
"_primary_term" : 2,
|
||||
"found": true,
|
||||
"_source" : {
|
||||
"product" : "r2d2",
|
||||
"details" : "A resourceful astromech droid"
|
||||
}
|
||||
"_index": "products",
|
||||
"_type": "_doc",
|
||||
"_id": "1567",
|
||||
"_version": 1,
|
||||
"_seq_no": 362,
|
||||
"_primary_term": 2,
|
||||
"found": true,
|
||||
"_source": {
|
||||
"product": "r2d2",
|
||||
"details": "A resourceful astromech droid"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 2/"_primary_term" : $body._primary_term/]
|
||||
// TESTRESPONSE[s/"_seq_no": 362/"_seq_no": $body._seq_no/]
|
||||
// TESTRESPONSE[s/"_primary_term": 2/"_primary_term": $body._primary_term/]
|
||||
|
||||
|
||||
Note: The <<search-search,Search API>> can return the `_seq_no` and `_primary_term`
|
||||
|
@ -102,9 +104,9 @@ of another tag by another API:
|
|||
--------------------------------------------------
|
||||
PUT products/_doc/1567?if_seq_no=362&if_primary_term=2
|
||||
{
|
||||
"product" : "r2d2",
|
||||
"details" : "A resourceful astromech droid",
|
||||
"tags": ["droid"]
|
||||
"product": "r2d2",
|
||||
"details": "A resourceful astromech droid",
|
||||
"tags": [ "droid" ]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
|
|
@ -65,7 +65,7 @@ Example to delete with routing
|
|||
--------------------------------------------------
|
||||
PUT /twitter/_doc/1?routing=kimchy
|
||||
{
|
||||
"test": "test"
|
||||
"test": "test"
|
||||
}
|
||||
--------------------------------------------------
|
||||
////
|
||||
|
@ -179,20 +179,20 @@ The API returns the following result:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_shards" : {
|
||||
"total" : 2,
|
||||
"failed" : 0,
|
||||
"successful" : 2
|
||||
},
|
||||
"_index" : "twitter",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1",
|
||||
"_version" : 2,
|
||||
"_primary_term": 1,
|
||||
"_seq_no": 5,
|
||||
"result": "deleted"
|
||||
"_shards": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"successful": 2
|
||||
},
|
||||
"_index": "twitter",
|
||||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"_version": 2,
|
||||
"_primary_term": 1,
|
||||
"_seq_no": 5,
|
||||
"result": "deleted"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"successful" : 2/"successful" : 1/]
|
||||
// TESTRESPONSE[s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
// TESTRESPONSE[s/"_seq_no" : 5/"_seq_no" : $body._seq_no/]
|
||||
// TESTRESPONSE[s/"successful": 2/"successful": 1/]
|
||||
// TESTRESPONSE[s/"_primary_term": 1/"_primary_term": $body._primary_term/]
|
||||
// TESTRESPONSE[s/"_seq_no": 5/"_seq_no": $body._seq_no/]
|
||||
|
|
|
@ -234,22 +234,22 @@ The API returns the following result:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_index" : "twitter",
|
||||
"_type" : "_doc",
|
||||
"_id" : "0",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 10,
|
||||
"_primary_term" : 1,
|
||||
"found": true,
|
||||
"_source" : {
|
||||
"user" : "kimchy",
|
||||
"date" : "2009-11-15T14:12:12",
|
||||
"likes": 0,
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
"_index": "twitter",
|
||||
"_type": "_doc",
|
||||
"_id": "0",
|
||||
"_version": 1,
|
||||
"_seq_no": 10,
|
||||
"_primary_term": 1,
|
||||
"found": true,
|
||||
"_source": {
|
||||
"user": "kimchy",
|
||||
"date": "2009-11-15T14:12:12",
|
||||
"likes": 0,
|
||||
"message": "trying out Elasticsearch"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]
|
||||
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no": $body._seq_no/ s/"_primary_term": 1/"_primary_term": $body._primary_term/]
|
||||
|
||||
Check to see if a document with the `_id` 0 exists:
|
||||
|
||||
|
@ -327,8 +327,8 @@ Now we can add a document:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_doc/1
|
||||
{
|
||||
"counter" : 1,
|
||||
"tags" : ["red"]
|
||||
"counter": 1,
|
||||
"tags": [ "red" ]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -371,8 +371,8 @@ You can also retrieve metadata fields like the `_routing` field:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_doc/2?routing=user1
|
||||
{
|
||||
"counter" : 1,
|
||||
"tags" : ["white"]
|
||||
"counter" : 1,
|
||||
"tags" : ["white"]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
|
|
@ -190,23 +190,23 @@ creation of indices. It does not affect the creation of data streams.
|
|||
--------------------------------------------------
|
||||
PUT _cluster/settings
|
||||
{
|
||||
"persistent": {
|
||||
"action.auto_create_index": "twitter,index10,-index1*,+ind*" <1>
|
||||
}
|
||||
"persistent": {
|
||||
"action.auto_create_index": "twitter,index10,-index1*,+ind*" <1>
|
||||
}
|
||||
}
|
||||
|
||||
PUT _cluster/settings
|
||||
{
|
||||
"persistent": {
|
||||
"action.auto_create_index": "false" <2>
|
||||
}
|
||||
"persistent": {
|
||||
"action.auto_create_index": "false" <2>
|
||||
}
|
||||
}
|
||||
|
||||
PUT _cluster/settings
|
||||
{
|
||||
"persistent": {
|
||||
"action.auto_create_index": "true" <3>
|
||||
}
|
||||
"persistent": {
|
||||
"action.auto_create_index": "true" <3>
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -239,9 +239,9 @@ the document.
|
|||
--------------------------------------------------
|
||||
POST twitter/_doc/
|
||||
{
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -250,21 +250,21 @@ The API returns the following result:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_shards" : {
|
||||
"total" : 2,
|
||||
"failed" : 0,
|
||||
"successful" : 2
|
||||
},
|
||||
"_index" : "twitter",
|
||||
"_type" : "_doc",
|
||||
"_id" : "W0tpsmIBdwcYyG50zbta",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 0,
|
||||
"_primary_term" : 1,
|
||||
"result": "created"
|
||||
"_shards": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"successful": 2
|
||||
},
|
||||
"_index": "twitter",
|
||||
"_type": "_doc",
|
||||
"_id": "W0tpsmIBdwcYyG50zbta",
|
||||
"_version": 1,
|
||||
"_seq_no": 0,
|
||||
"_primary_term": 1,
|
||||
"result": "created"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/W0tpsmIBdwcYyG50zbta/$body._id/ s/"successful" : 2/"successful" : 1/]
|
||||
// TESTRESPONSE[s/W0tpsmIBdwcYyG50zbta/$body._id/ s/"successful": 2/"successful": 1/]
|
||||
|
||||
[float]
|
||||
[[optimistic-concurrency-control-index]]
|
||||
|
@ -289,9 +289,9 @@ on a per-operation basis using the `routing` parameter. For example:
|
|||
--------------------------------------------------
|
||||
POST twitter/_doc?routing=kimchy
|
||||
{
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -366,11 +366,11 @@ replication succeeded/failed.
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_shards" : {
|
||||
"total" : 2,
|
||||
"failed" : 0,
|
||||
"successful" : 2
|
||||
}
|
||||
"_shards": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"successful": 2
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
|
@ -414,9 +414,9 @@ to 5 minutes:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_doc/1?timeout=5m
|
||||
{
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -443,7 +443,7 @@ conflict will occur and the index operation will fail. For example:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_doc/1?version=2&version_type=external
|
||||
{
|
||||
"message" : "elasticsearch now has versioning support, double cool!"
|
||||
"message" : "elasticsearch now has versioning support, double cool!"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -501,9 +501,9 @@ Insert a JSON document into the `twitter` index with an `_id` of 1:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_doc/1
|
||||
{
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -512,21 +512,21 @@ The API returns the following result:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_shards" : {
|
||||
"total" : 2,
|
||||
"failed" : 0,
|
||||
"successful" : 2
|
||||
},
|
||||
"_index" : "twitter",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1",
|
||||
"_version" : 1,
|
||||
"_seq_no" : 0,
|
||||
"_primary_term" : 1,
|
||||
"result" : "created"
|
||||
"_shards": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"successful": 2
|
||||
},
|
||||
"_index": "twitter",
|
||||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"_version": 1,
|
||||
"_seq_no": 0,
|
||||
"_primary_term": 1,
|
||||
"result": "created"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"successful" : 2/"successful" : 1/]
|
||||
// TESTRESPONSE[s/"successful": 2/"successful": 1/]
|
||||
|
||||
Use the `_create` resource to index a document into the `twitter` index if
|
||||
no document with that ID exists:
|
||||
|
@ -535,9 +535,9 @@ no document with that ID exists:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_create/1
|
||||
{
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -548,8 +548,8 @@ index if no document with that ID exists:
|
|||
--------------------------------------------------
|
||||
PUT twitter/_doc/1?op_type=create
|
||||
{
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
"user" : "kimchy",
|
||||
"post_date" : "2009-11-15T14:12:12",
|
||||
"message" : "trying out Elasticsearch"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -10,16 +10,16 @@ Retrieves multiple JSON documents by ID.
|
|||
--------------------------------------------------
|
||||
GET /_mget
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_index" : "twitter",
|
||||
"_id" : "1"
|
||||
},
|
||||
{
|
||||
"_index" : "twitter",
|
||||
"_id" : "2"
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_index": "twitter",
|
||||
"_id": "1"
|
||||
},
|
||||
{
|
||||
"_index": "twitter",
|
||||
"_id": "2"
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[setup:twitter]
|
||||
|
@ -130,16 +130,16 @@ If you specify an index in the request URI, only the document IDs are required i
|
|||
--------------------------------------------------
|
||||
GET /twitter/_mget
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_type" : "_doc",
|
||||
"_id" : "1"
|
||||
},
|
||||
{
|
||||
"_type" : "_doc",
|
||||
"_id" : "2"
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_type": "_doc",
|
||||
"_id": "1"
|
||||
},
|
||||
{
|
||||
"_type": "_doc",
|
||||
"_id": "2"
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -149,14 +149,14 @@ And type:
|
|||
--------------------------------------------------
|
||||
GET /test/_doc/_mget
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_id" : "1"
|
||||
},
|
||||
{
|
||||
"_id" : "2"
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_id": "1"
|
||||
},
|
||||
{
|
||||
"_id": "2"
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[setup:twitter]
|
||||
|
@ -167,7 +167,7 @@ You can use the `ids` element to simplify the request:
|
|||
--------------------------------------------------
|
||||
GET /twitter/_mget
|
||||
{
|
||||
"ids" : ["1", "2"]
|
||||
"ids" : ["1", "2"]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[setup:twitter]
|
||||
|
@ -189,29 +189,29 @@ from document 3 but filters out the `user.location` field.
|
|||
--------------------------------------------------
|
||||
GET /_mget
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1",
|
||||
"_source" : false
|
||||
},
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "2",
|
||||
"_source" : ["field3", "field4"]
|
||||
},
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "3",
|
||||
"_source" : {
|
||||
"include": ["user"],
|
||||
"exclude": ["user.location"]
|
||||
}
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"_source": false
|
||||
},
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "2",
|
||||
"_source": [ "field3", "field4" ]
|
||||
},
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "3",
|
||||
"_source": {
|
||||
"include": [ "user" ],
|
||||
"exclude": [ "user.location" ]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -230,20 +230,20 @@ For example, the following request retrieves `field1` and `field2` from document
|
|||
--------------------------------------------------
|
||||
GET /_mget
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1",
|
||||
"stored_fields" : ["field1", "field2"]
|
||||
},
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "2",
|
||||
"stored_fields" : ["field3", "field4"]
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"stored_fields": [ "field1", "field2" ]
|
||||
},
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "2",
|
||||
"stored_fields": [ "field3", "field4" ]
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -255,15 +255,15 @@ overridden to return `field3` and `field4` for document 2.
|
|||
--------------------------------------------------
|
||||
GET /test/_doc/_mget?stored_fields=field1,field2
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_id" : "1"
|
||||
},
|
||||
{
|
||||
"_id" : "2",
|
||||
"stored_fields" : ["field3", "field4"]
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_id": "1"
|
||||
},
|
||||
{
|
||||
"_id": "2",
|
||||
"stored_fields": [ "field3", "field4" ]
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -278,18 +278,18 @@ and fetches `test/_doc/1` from the shard corresponding to routing key `key2`.
|
|||
--------------------------------------------------
|
||||
GET /_mget?routing=key1
|
||||
{
|
||||
"docs" : [
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "1",
|
||||
"routing" : "key2"
|
||||
},
|
||||
{
|
||||
"_index" : "test",
|
||||
"_type" : "_doc",
|
||||
"_id" : "2"
|
||||
}
|
||||
]
|
||||
"docs": [
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "1",
|
||||
"routing": "key2"
|
||||
},
|
||||
{
|
||||
"_index": "test",
|
||||
"_type": "_doc",
|
||||
"_id": "2"
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -114,13 +114,13 @@ following simplified syntax:
|
|||
--------------------------------------------------
|
||||
POST /twitter/_mtermvectors
|
||||
{
|
||||
"ids" : ["1", "2"],
|
||||
"parameters": {
|
||||
"fields": [
|
||||
"message"
|
||||
],
|
||||
"term_statistics": true
|
||||
}
|
||||
"ids": [ "1", "2" ],
|
||||
"parameters": {
|
||||
"fields": [
|
||||
"message"
|
||||
],
|
||||
"term_statistics": true
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[setup:twitter]
|
||||
|
|
|
@ -246,61 +246,61 @@ Response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_id": "1",
|
||||
"_index": "twitter",
|
||||
"_id": "1",
|
||||
"_index": "twitter",
|
||||
"_type": "_doc",
|
||||
"_version": 1,
|
||||
"found": true,
|
||||
"took": 6,
|
||||
"term_vectors": {
|
||||
"text": {
|
||||
"field_statistics": {
|
||||
"doc_count": 2,
|
||||
"sum_doc_freq": 6,
|
||||
"sum_ttf": 8
|
||||
"_version": 1,
|
||||
"found": true,
|
||||
"took": 6,
|
||||
"term_vectors": {
|
||||
"text": {
|
||||
"field_statistics": {
|
||||
"doc_count": 2,
|
||||
"sum_doc_freq": 6,
|
||||
"sum_ttf": 8
|
||||
},
|
||||
"terms": {
|
||||
"test": {
|
||||
"doc_freq": 2,
|
||||
"term_freq": 3,
|
||||
"tokens": [
|
||||
{
|
||||
"end_offset": 12,
|
||||
"payload": "d29yZA==",
|
||||
"position": 1,
|
||||
"start_offset": 8
|
||||
},
|
||||
"terms": {
|
||||
"test": {
|
||||
"doc_freq": 2,
|
||||
"term_freq": 3,
|
||||
"tokens": [
|
||||
{
|
||||
"end_offset": 12,
|
||||
"payload": "d29yZA==",
|
||||
"position": 1,
|
||||
"start_offset": 8
|
||||
},
|
||||
{
|
||||
"end_offset": 17,
|
||||
"payload": "d29yZA==",
|
||||
"position": 2,
|
||||
"start_offset": 13
|
||||
},
|
||||
{
|
||||
"end_offset": 22,
|
||||
"payload": "d29yZA==",
|
||||
"position": 3,
|
||||
"start_offset": 18
|
||||
}
|
||||
],
|
||||
"ttf": 4
|
||||
},
|
||||
"twitter": {
|
||||
"doc_freq": 2,
|
||||
"term_freq": 1,
|
||||
"tokens": [
|
||||
{
|
||||
"end_offset": 7,
|
||||
"payload": "d29yZA==",
|
||||
"position": 0,
|
||||
"start_offset": 0
|
||||
}
|
||||
],
|
||||
"ttf": 2
|
||||
}
|
||||
{
|
||||
"end_offset": 17,
|
||||
"payload": "d29yZA==",
|
||||
"position": 2,
|
||||
"start_offset": 13
|
||||
},
|
||||
{
|
||||
"end_offset": 22,
|
||||
"payload": "d29yZA==",
|
||||
"position": 3,
|
||||
"start_offset": 18
|
||||
}
|
||||
],
|
||||
"ttf": 4
|
||||
},
|
||||
"twitter": {
|
||||
"doc_freq": 2,
|
||||
"term_freq": 1,
|
||||
"tokens": [
|
||||
{
|
||||
"end_offset": 7,
|
||||
"payload": "d29yZA==",
|
||||
"position": 0,
|
||||
"start_offset": 0
|
||||
}
|
||||
],
|
||||
"ttf": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -427,18 +427,18 @@ their tf-idf must be too low.
|
|||
--------------------------------------------------
|
||||
GET /imdb/_termvectors
|
||||
{
|
||||
"doc": {
|
||||
"plot": "When wealthy industrialist Tony Stark is forced to build an armored suit after a life-threatening incident, he ultimately decides to use its technology to fight against evil."
|
||||
},
|
||||
"term_statistics" : true,
|
||||
"field_statistics" : true,
|
||||
"positions": false,
|
||||
"offsets": false,
|
||||
"filter" : {
|
||||
"max_num_terms" : 3,
|
||||
"min_term_freq" : 1,
|
||||
"min_doc_freq" : 1
|
||||
}
|
||||
"doc": {
|
||||
"plot": "When wealthy industrialist Tony Stark is forced to build an armored suit after a life-threatening incident, he ultimately decides to use its technology to fight against evil."
|
||||
},
|
||||
"term_statistics": true,
|
||||
"field_statistics": true,
|
||||
"positions": false,
|
||||
"offsets": false,
|
||||
"filter": {
|
||||
"max_num_terms": 3,
|
||||
"min_term_freq": 1,
|
||||
"min_doc_freq": 1
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[skip:no imdb test index]
|
||||
|
|
|
@ -86,8 +86,8 @@ First, let's index a simple doc:
|
|||
--------------------------------------------------
|
||||
PUT test/_doc/1
|
||||
{
|
||||
"counter" : 1,
|
||||
"tags" : ["red"]
|
||||
"counter" : 1,
|
||||
"tags" : ["red"]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -98,13 +98,13 @@ following script:
--------------------------------------------------
POST test/_update/1
{
  "script" : {
    "source": "ctx._source.counter += params.count",
    "lang": "painless",
    "params" : {
      "count" : 4
    }
  }
}
--------------------------------------------------
// TEST[continued]
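
If you want to double-check the result, a plain document GET works; this is just an illustrative sanity check against the `test` index and document id used above, not part of the original example:

--------------------------------------------------
GET test/_doc/1
--------------------------------------------------

After the scripted update, the returned `_source` should show `"counter": 5` (the initial value of 1 incremented by 4).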
@ -116,13 +116,13 @@ Similarly, you could use an update script to add a tag to the list of tags
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "ctx._source.tags.add(params.tag)",
    "lang": "painless",
    "params": {
      "tag": "blue"
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -137,13 +137,13 @@ script just removes one occurrence.
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "if (ctx._source.tags.contains(params.tag)) { ctx._source.tags.remove(ctx._source.tags.indexOf(params.tag)) }",
    "lang": "painless",
    "params": {
      "tag": "blue"
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -155,7 +155,7 @@ adds the field `new_field`:
--------------------------------------------------
POST test/_update/1
{
  "script" : "ctx._source.new_field = 'value_of_new_field'"
}
--------------------------------------------------
// TEST[continued]
@ -166,7 +166,7 @@ Conversely, this script removes the field `new_field`:
--------------------------------------------------
POST test/_update/1
{
  "script" : "ctx._source.remove('new_field')"
}
--------------------------------------------------
// TEST[continued]
@ -179,13 +179,13 @@ the `tags` field contains `green`, otherwise it does nothing (`noop`):
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "if (ctx._source.tags.contains(params.tag)) { ctx.op = 'delete' } else { ctx.op = 'none' }",
    "lang": "painless",
    "params": {
      "tag": "green"
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -200,9 +200,9 @@ existing document:
--------------------------------------------------
POST test/_update/1
{
  "doc": {
    "name": "new_name"
  }
}
--------------------------------------------------
// TEST[continued]
@ -220,9 +220,9 @@ anything and return `"result": "noop"`:
--------------------------------------------------
POST test/_update/1
{
  "doc": {
    "name": "new_name"
  }
}
--------------------------------------------------
// TEST[continued]
@ -254,10 +254,10 @@ You can disable this behavior by setting `"detect_noop": false`:
--------------------------------------------------
POST test/_update/1
{
  "doc": {
    "name": "new_name"
  },
  "detect_noop": false
}
--------------------------------------------------
// TEST[continued]
@ -274,16 +274,16 @@ are inserted as a new document. If the document exists, the
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "ctx._source.counter += params.count",
    "lang": "painless",
    "params": {
      "count": 4
    }
  },
  "upsert": {
    "counter": 1
  }
}
--------------------------------------------------
// TEST[continued]
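
To see the upsert branch in action, one option is to delete the test document and re-run the same request; this is an illustrative aside assuming the `test` index from the snippets above, not part of the original page. With no existing document, the `upsert` body is indexed as a new document and the script is not run:

--------------------------------------------------
DELETE test/_doc/1

POST test/_update/1
{
  "script": {
    "source": "ctx._source.counter += params.count",
    "lang": "painless",
    "params": {
      "count": 4
    }
  },
  "upsert": {
    "counter": 1
  }
}
--------------------------------------------------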
@ -299,18 +299,18 @@ To run the script whether or not the document exists, set `scripted_upsert` to
--------------------------------------------------
POST sessions/_update/dh3sgudg8gsrgl
{
  "scripted_upsert": true,
  "script": {
    "id": "my_web_session_summariser",
    "params": {
      "pageViewEvent": {
        "url": "foo.com/bar",
        "response": 404,
        "time": "2014-01-01 12:32"
      }
    }
  },
  "upsert": {}
}
--------------------------------------------------
// TEST[s/"id": "my_web_session_summariser"/"source": "ctx._source.page_view_event = params.pageViewEvent"/]
@ -328,10 +328,10 @@ value:
--------------------------------------------------
POST test/_update/1
{
  "doc": {
    "name": "new_name"
  },
  "doc_as_upsert": true
}
--------------------------------------------------
// TEST[continued]

@ -333,17 +333,17 @@ To get some data into {es} that you can start searching and analyzing:
[source,js]
--------------------------------------------------
{
  "account_number": 0,
  "balance": 16623,
  "firstname": "Bradshaw",
  "lastname": "Mckenzie",
  "age": 29,
  "gender": "F",
  "address": "244 Columbus Place",
  "employer": "Euron",
  "email": "bradshawmckenzie@euron.com",
  "city": "Hobucken",
  "state": "CO"
}
--------------------------------------------------
// NOTCONSOLE

@ -201,23 +201,23 @@ An initial search typically begins with a query to identify strongly related ter
--------------------------------------------------
POST clicklogs/_graph/explore
{
  "query": { <1>
    "match": {
      "query.raw": "midi"
    }
  },
  "vertices": [ <2>
    {
      "field": "product"
    }
  ],
  "connections": { <3>
    "vertices": [
      {
        "field": "query.raw"
      }
    ]
  }
}
--------------------------------------------------

@ -306,51 +306,51 @@ every document could be of interest, see the
--------------------------------------------------
POST clicklogs/_graph/explore
{
  "query": {
    "match": {
      "query.raw": "midi"
    }
  },
  "controls": {
    "use_significance": false,<1>
    "sample_size": 2000,<2>
    "timeout": 2000,<3>
    "sample_diversity": {<4>
      "field": "category.raw",
      "max_docs_per_value": 500
    }
  },
  "vertices": [
    {
      "field": "product",
      "size": 5,<5>
      "min_doc_count": 10,<6>
      "shard_min_doc_count": 3<7>
    }
  ],
  "connections": {
    "query": {<8>
      "bool": {
        "filter": [
          {
            "range": {
              "query_time": {
                "gte": "2015-10-01 00:00:00"
              }
            }
          }
        ]
      }
    },
    "vertices": [
      {
        "field": "query.raw",
        "size": 5,
        "min_doc_count": 10,
        "shard_min_doc_count": 3
      }
    ]
  }
}
--------------------------------------------------

@ -104,16 +104,16 @@ look like this:
|
|||
--------------------------------------------------
|
||||
PUT index
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"body": {
|
||||
"type": "text"
|
||||
},
|
||||
"pagerank": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"body": {
|
||||
"type": "text"
|
||||
},
|
||||
"pagerank": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -123,16 +123,16 @@ PUT index
|
|||
--------------------------------------------------
|
||||
GET index/_search
|
||||
{
|
||||
"query" : {
|
||||
"script_score" : {
|
||||
"query" : {
|
||||
"match": { "body": "elasticsearch" }
|
||||
},
|
||||
"script" : {
|
||||
"source" : "_score * saturation(doc['pagerank'].value, 10)" <1>
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"script_score": {
|
||||
"query": {
|
||||
"match": { "body": "elasticsearch" }
|
||||
},
|
||||
"script": {
|
||||
"source": "_score * saturation(doc['pagerank'].value, 10)" <1>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
//TEST[continued]
|
||||
|
@ -148,16 +148,16 @@ look like below:
|
|||
--------------------------------------------------
|
||||
PUT index
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"body": {
|
||||
"type": "text"
|
||||
},
|
||||
"pagerank": {
|
||||
"type": "rank_feature"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"body": {
|
||||
"type": "text"
|
||||
},
|
||||
"pagerank": {
|
||||
"type": "rank_feature"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST
|
||||
|
@ -168,21 +168,21 @@ PUT index
|
|||
--------------------------------------------------
|
||||
GET _search
|
||||
{
|
||||
"query" : {
|
||||
"bool" : {
|
||||
"must": {
|
||||
"match": { "body": "elasticsearch" }
|
||||
},
|
||||
"should": {
|
||||
"rank_feature": {
|
||||
"field": "pagerank", <1>
|
||||
"saturation": {
|
||||
"pivot": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match": { "body": "elasticsearch" }
|
||||
},
|
||||
"should": {
|
||||
"rank_feature": {
|
||||
"field": "pagerank", <1>
|
||||
"saturation": {
|
||||
"pivot": 10
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -150,11 +150,11 @@ The API returns following response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"acknowledged" : true,
|
||||
"shards_acknowledged" : true,
|
||||
"indices" : [ {
|
||||
"name" : "my_index",
|
||||
"blocked" : true
|
||||
} ]
|
||||
"acknowledged" : true,
|
||||
"shards_acknowledged" : true,
|
||||
"indices" : [ {
|
||||
"name" : "my_index",
|
||||
"blocked" : true
|
||||
} ]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -16,19 +16,19 @@ For instance the following example shows how to define a sort on a single field:
|
|||
--------------------------------------------------
|
||||
PUT twitter
|
||||
{
|
||||
"settings" : {
|
||||
"index" : {
|
||||
"sort.field" : "date", <1>
|
||||
"sort.order" : "desc" <2>
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"date": {
|
||||
"type": "date"
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"sort.field": "date", <1>
|
||||
"sort.order": "desc" <2>
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"date": {
|
||||
"type": "date"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -41,23 +41,23 @@ It is also possible to sort the index by more than one field:
--------------------------------------------------
PUT twitter
{
  "settings": {
    "index": {
      "sort.field": [ "username", "date" ], <1>
      "sort.order": [ "asc", "desc" ] <2>
    }
  },
  "mappings": {
    "properties": {
      "username": {
        "type": "keyword",
        "doc_values": true
      },
      "date": {
        "type": "date"
      }
    }
  }
}
--------------------------------------------------

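With the index sorted on `username` and `date`, a search that sorts the same way can take advantage of the on-disk order. The request below is an illustrative sketch against the `twitter` index defined above, not part of the original page:

--------------------------------------------------
GET /twitter/_search
{
  "sort": [
    { "username": "asc" },
    { "date": "desc" }
  ]
}
--------------------------------------------------
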
@ -114,19 +114,19 @@ For example, let's say we have an index that contains events sorted by a timesta
|
|||
--------------------------------------------------
|
||||
PUT events
|
||||
{
|
||||
"settings" : {
|
||||
"index" : {
|
||||
"sort.field" : "timestamp",
|
||||
"sort.order" : "desc" <1>
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"timestamp": {
|
||||
"type": "date"
|
||||
}
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"sort.field": "timestamp",
|
||||
"sort.order": "desc" <1>
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"timestamp": {
|
||||
"type": "date"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -138,10 +138,10 @@ You can search for the last 10 events with:
|
|||
--------------------------------------------------
|
||||
GET /events/_search
|
||||
{
|
||||
"size": 10,
|
||||
"sort": [
|
||||
{ "timestamp": "desc" }
|
||||
]
|
||||
"size": 10,
|
||||
"sort": [
|
||||
{ "timestamp": "desc" }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -159,11 +159,11 @@ to false:
|
|||
--------------------------------------------------
|
||||
GET /events/_search
|
||||
{
|
||||
"size": 10,
|
||||
"sort": [ <1>
|
||||
{ "timestamp": "desc" }
|
||||
],
|
||||
"track_total_hits": false
|
||||
"size": 10,
|
||||
"sort": [ <1>
|
||||
{ "timestamp": "desc" }
|
||||
],
|
||||
"track_total_hits": false
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
|
|
@ -33,15 +33,15 @@ All of the above settings are _dynamic_ and can be set for each index using the
--------------------------------------------------
PUT /twitter/_settings
{
  "index.search.slowlog.threshold.query.warn": "10s",
  "index.search.slowlog.threshold.query.info": "5s",
  "index.search.slowlog.threshold.query.debug": "2s",
  "index.search.slowlog.threshold.query.trace": "500ms",
  "index.search.slowlog.threshold.fetch.warn": "1s",
  "index.search.slowlog.threshold.fetch.info": "800ms",
  "index.search.slowlog.threshold.fetch.debug": "500ms",
  "index.search.slowlog.threshold.fetch.trace": "200ms",
  "index.search.slowlog.level": "info"
}
--------------------------------------------------
// TEST[setup:twitter]
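
Since these are dynamic index settings, you can read back the current values at any time. This is just an illustrative check on the `twitter` index used above, not part of the original page:

--------------------------------------------------
GET /twitter/_settings?flat_settings=true
--------------------------------------------------

The `flat_settings` option returns the settings in the same dotted form used in the update request, which makes them easy to compare against what was just applied.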
@ -143,12 +143,12 @@ All of the above settings are _dynamic_ and can be set for each index using the
|
|||
--------------------------------------------------
|
||||
PUT /twitter/_settings
|
||||
{
|
||||
"index.indexing.slowlog.threshold.index.warn": "10s",
|
||||
"index.indexing.slowlog.threshold.index.info": "5s",
|
||||
"index.indexing.slowlog.threshold.index.debug": "2s",
|
||||
"index.indexing.slowlog.threshold.index.trace": "500ms",
|
||||
"index.indexing.slowlog.level": "info",
|
||||
"index.indexing.slowlog.source": "1000"
|
||||
"index.indexing.slowlog.threshold.index.warn": "10s",
|
||||
"index.indexing.slowlog.threshold.index.info": "5s",
|
||||
"index.indexing.slowlog.threshold.index.debug": "2s",
|
||||
"index.indexing.slowlog.threshold.index.trace": "500ms",
|
||||
"index.indexing.slowlog.level": "info",
|
||||
"index.indexing.slowlog.source": "1000"
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[setup:twitter]
|
||||
|
|
|
@ -85,11 +85,11 @@ with a mapping for the `user_id` field:
|
|||
--------------------------------------------------
|
||||
PUT /users
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"user_id" : {"type" : "integer"}
|
||||
}
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"user_id" : {"type" : "integer"}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -99,12 +99,12 @@ Then add the index alias for a specific user, `user_12`:
|
|||
--------------------------------------------------
|
||||
PUT /users/_alias/user_12
|
||||
{
|
||||
"routing" : "12",
|
||||
"filter" : {
|
||||
"term" : {
|
||||
"user_id" : 12
|
||||
}
|
||||
"routing" : "12",
|
||||
"filter" : {
|
||||
"term" : {
|
||||
"user_id" : 12
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -119,18 +119,18 @@ to add an index alias during index creation.
|
|||
--------------------------------------------------
|
||||
PUT /logs_20302801
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"year" : {"type" : "integer"}
|
||||
}
|
||||
},
|
||||
"aliases" : {
|
||||
"current_day" : {},
|
||||
"2030" : {
|
||||
"filter" : {
|
||||
"term" : {"year" : 2030 }
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"year": { "type": "integer" }
|
||||
}
|
||||
},
|
||||
"aliases": {
|
||||
"current_day": {},
|
||||
"2030": {
|
||||
"filter": {
|
||||
"term": { "year": 2030 }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -12,9 +12,9 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
|
|||
----
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "twitter", "alias" : "alias1" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "twitter", "alias" : "alias1" } }
|
||||
]
|
||||
}
|
||||
----
|
||||
// TEST[setup:twitter]
|
||||
|
@ -170,9 +170,9 @@ The following request adds the `alias1` alias to the `test1` index.
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "test1", "alias" : "alias1" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "test1", "alias" : "alias1" } }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test1\nPUT test2\n/]
|
||||
|
@ -186,9 +186,9 @@ The following request removes the `alias1` alias.
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "remove" : { "index" : "test1", "alias" : "alias1" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "remove" : { "index" : "test1", "alias" : "alias1" } }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -204,10 +204,10 @@ period of time where the alias does not point to an index:
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "remove" : { "index" : "test1", "alias" : "alias1" } },
|
||||
{ "add" : { "index" : "test1", "alias" : "alias2" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "remove" : { "index" : "test1", "alias" : "alias1" } },
|
||||
{ "add" : { "index" : "test1", "alias" : "alias2" } }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@ -222,10 +222,10 @@ actions:
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "test1", "alias" : "alias1" } },
|
||||
{ "add" : { "index" : "test2", "alias" : "alias1" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "test1", "alias" : "alias1" } },
|
||||
{ "add" : { "index" : "test2", "alias" : "alias1" } }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test1\nPUT test2\n/]
|
||||
|
@ -236,9 +236,9 @@ Multiple indices can be specified for an action with the `indices` array syntax:
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test1\nPUT test2\n/]
|
||||
|
@ -253,9 +253,9 @@ more than one index that share a common name:
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "test*", "alias" : "all_test_indices" } }
|
||||
]
|
||||
"actions" : [
|
||||
{ "add" : { "index" : "test*", "alias" : "all_test_indices" } }
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test1\nPUT test2\n/]
|
||||
|
@ -278,10 +278,10 @@ PUT test <1>
|
|||
PUT test_2 <2>
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{ "add": { "index": "test_2", "alias": "test" } },
|
||||
{ "remove_index": { "index": "test" } } <3>
|
||||
]
|
||||
"actions" : [
|
||||
{ "add": { "index": "test_2", "alias": "test" } },
|
||||
{ "remove_index": { "index": "test" } } <3>
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -320,15 +320,15 @@ Now we can create an alias that uses a filter on field `user`:
--------------------------------------------------
POST /_aliases
{
  "actions": [
    {
      "add": {
        "index": "test1",
        "alias": "alias2",
        "filter": { "term": { "user": "kimchy" } }
      }
    }
  ]
}
--------------------------------------------------
// TEST[continued]
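
Once the filtered alias exists, searching through it applies the filter transparently. The following is an illustrative check, assuming the `test1` index and `alias2` alias defined above; it is not part of the original page:

--------------------------------------------------
GET /alias2/_search
{
  "query": {
    "match_all": {}
  }
}
--------------------------------------------------

Only documents whose `user` field matches `kimchy` are returned, because the alias filter is combined with the query.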
@ -348,15 +348,15 @@ automatically modified to use value `1` for routing:
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{
|
||||
"add" : {
|
||||
"index" : "test",
|
||||
"alias" : "alias1",
|
||||
"routing" : "1"
|
||||
}
|
||||
}
|
||||
]
|
||||
"actions": [
|
||||
{
|
||||
"add": {
|
||||
"index": "test",
|
||||
"alias": "alias1",
|
||||
"routing": "1"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test\n/]
|
||||
|
@ -368,16 +368,16 @@ and indexing operations:
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{
|
||||
"add" : {
|
||||
"index" : "test",
|
||||
"alias" : "alias2",
|
||||
"search_routing" : "1,2",
|
||||
"index_routing" : "2"
|
||||
}
|
||||
}
|
||||
]
|
||||
"actions": [
|
||||
{
|
||||
"add": {
|
||||
"index": "test",
|
||||
"alias": "alias2",
|
||||
"search_routing": "1,2",
|
||||
"index_routing": "2"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test\n/]
|
||||
|
@ -415,21 +415,21 @@ Rollover (see <<indices-rollover-index, Rollover With Write Index>>).
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{
|
||||
"add" : {
|
||||
"index" : "test",
|
||||
"alias" : "alias1",
|
||||
"is_write_index" : true
|
||||
}
|
||||
},
|
||||
{
|
||||
"add" : {
|
||||
"index" : "test2",
|
||||
"alias" : "alias1"
|
||||
}
|
||||
}
|
||||
]
|
||||
"actions": [
|
||||
{
|
||||
"add": {
|
||||
"index": "test",
|
||||
"alias": "alias1",
|
||||
"is_write_index": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"add": {
|
||||
"index": "test2",
|
||||
"alias": "alias1"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test\nPUT test2\n/]
|
||||
|
@ -441,7 +441,7 @@ In this example, we associate the alias `alias1` to both `test` and `test2`, whe
--------------------------------------------------
PUT /alias1/_doc/1
{
  "foo": "bar"
}
--------------------------------------------------
// TEST[continued]
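
Because `test` is flagged as the write index for `alias1` in the preceding example, the document above lands in `test`. A quick illustrative way to confirm this, assuming the indices from the snippets above, is to fetch it directly from that index:

--------------------------------------------------
GET /test/_doc/1
--------------------------------------------------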
@ -462,21 +462,21 @@ do an atomic swap. The swap is not dependent on the ordering of the actions.
|
|||
--------------------------------------------------
|
||||
POST /_aliases
|
||||
{
|
||||
"actions" : [
|
||||
{
|
||||
"add" : {
|
||||
"index" : "test",
|
||||
"alias" : "alias1",
|
||||
"is_write_index" : false
|
||||
}
|
||||
}, {
|
||||
"add" : {
|
||||
"index" : "test2",
|
||||
"alias" : "alias1",
|
||||
"is_write_index" : true
|
||||
}
|
||||
}
|
||||
]
|
||||
"actions": [
|
||||
{
|
||||
"add": {
|
||||
"index": "test",
|
||||
"alias": "alias1",
|
||||
"is_write_index": false
|
||||
}
|
||||
}, {
|
||||
"add": {
|
||||
"index": "test2",
|
||||
"alias": "alias1",
|
||||
"is_write_index": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[s/^/PUT test\nPUT test2\n/]
|
||||
|
|
|
@ -102,34 +102,34 @@ NOTE: Using the following analyzer as an index analyzer results in an error.
|
|||
--------------------------------------------------
|
||||
PUT /my_index
|
||||
{
|
||||
"settings": {
|
||||
"index" : {
|
||||
"analysis" : {
|
||||
"analyzer" : {
|
||||
"my_synonyms" : {
|
||||
"tokenizer" : "whitespace",
|
||||
"filter" : ["synonym"]
|
||||
}
|
||||
},
|
||||
"filter" : {
|
||||
"synonym" : {
|
||||
"type" : "synonym_graph",
|
||||
"synonyms_path" : "analysis/synonym.txt", <1>
|
||||
"updateable" : true <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"text": {
|
||||
"type": "text",
|
||||
"analyzer" : "standard",
|
||||
"search_analyzer": "my_synonyms" <3>
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"my_synonyms": {
|
||||
"tokenizer": "whitespace",
|
||||
"filter": [ "synonym" ]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"synonym": {
|
||||
"type": "synonym_graph",
|
||||
"synonyms_path": "analysis/synonym.txt", <1>
|
||||
"updateable": true <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"text": {
|
||||
"type": "text",
|
||||
"analyzer": "standard",
|
||||
"search_analyzer": "my_synonyms" <3>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -153,25 +153,25 @@ The API returns the following response.
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"_shards" : {
|
||||
"total" : 2,
|
||||
"successful" : 2,
|
||||
"failed" : 0
|
||||
},
|
||||
"reload_details" : [
|
||||
{
|
||||
"index" : "my_index",
|
||||
"reloaded_analyzers" : [
|
||||
"my_synonyms"
|
||||
],
|
||||
"reloaded_node_ids" : [
|
||||
"mfdqTXn_T7SGr2Ho2KT8uw"
|
||||
]
|
||||
}
|
||||
]
|
||||
"_shards": {
|
||||
"total": 2,
|
||||
"successful": 2,
|
||||
"failed": 0
|
||||
},
|
||||
"reload_details": [
|
||||
{
|
||||
"index": "my_index",
|
||||
"reloaded_analyzers": [
|
||||
"my_synonyms"
|
||||
],
|
||||
"reloaded_node_ids": [
|
||||
"mfdqTXn_T7SGr2Ho2KT8uw"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
// TESTRESPONSE[s/"total" : 2/"total" : $body._shards.total/]
|
||||
// TESTRESPONSE[s/"successful" : 2/"successful" : $body._shards.successful/]
|
||||
// TESTRESPONSE[s/"total": 2/"total": $body._shards.total/]
|
||||
// TESTRESPONSE[s/"successful": 2/"successful": $body._shards.successful/]
|
||||
// TESTRESPONSE[s/mfdqTXn_T7SGr2Ho2KT8uw/$body.reload_details.0.reloaded_node_ids.0/]
|
||||
|
|
|
@ -73,12 +73,12 @@ The API returns following response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"acknowledged" : true,
|
||||
"shards_acknowledged" : true,
|
||||
"indices" : {
|
||||
"my_index" : {
|
||||
"closed" : true
|
||||
}
|
||||
"acknowledged": true,
|
||||
"shards_acknowledged": true,
|
||||
"indices": {
|
||||
"my_index": {
|
||||
"closed": true
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -83,12 +83,12 @@ associated with it, defined in the body:
|
|||
--------------------------------------------------
|
||||
PUT /twitter
|
||||
{
|
||||
"settings" : {
|
||||
"index" : {
|
||||
"number_of_shards" : 3, <1>
|
||||
"number_of_replicas" : 2 <2>
|
||||
}
|
||||
"settings": {
|
||||
"index": {
|
||||
"number_of_shards": 3, <1>
|
||||
"number_of_replicas": 2 <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -101,10 +101,10 @@ or more simplified
|
|||
--------------------------------------------------
|
||||
PUT /twitter
|
||||
{
|
||||
"settings" : {
|
||||
"number_of_shards" : 3,
|
||||
"number_of_replicas" : 2
|
||||
}
|
||||
"settings": {
|
||||
"number_of_shards": 3,
|
||||
"number_of_replicas": 2
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -125,14 +125,14 @@ The create index API allows for providing a mapping definition:
|
|||
--------------------------------------------------
|
||||
PUT /test
|
||||
{
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"field1" : { "type" : "text" }
|
||||
}
|
||||
"settings": {
|
||||
"number_of_shards": 1
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"field1": { "type": "text" }
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -149,15 +149,15 @@ The create index API allows also to provide a set of <<indices-aliases,aliases>>
|
|||
--------------------------------------------------
|
||||
PUT /test
|
||||
{
|
||||
"aliases" : {
|
||||
"alias_1" : {},
|
||||
"alias_2" : {
|
||||
"filter" : {
|
||||
"term" : {"user" : "kimchy" }
|
||||
},
|
||||
"routing" : "kimchy"
|
||||
}
|
||||
"aliases": {
|
||||
"alias_1": {},
|
||||
"alias_2": {
|
||||
"filter": {
|
||||
"term": { "user": "kimchy" }
|
||||
},
|
||||
"routing": "kimchy"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -171,9 +171,9 @@ what happened:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"acknowledged": true,
|
||||
"shards_acknowledged": true,
|
||||
"index": "test"
|
||||
"acknowledged": true,
|
||||
"shards_acknowledged": true,
|
||||
"index": "test"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -196,9 +196,9 @@ the `wait_for_active_shards` value on all subsequent write operations):
|
|||
--------------------------------------------------
|
||||
PUT /test
|
||||
{
|
||||
"settings": {
|
||||
"index.write.wait_for_active_shards": "2"
|
||||
}
|
||||
"settings": {
|
||||
"index.write.wait_for_active_shards": "2"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[skip:requires two nodes]
|
||||
|
|
|
@ -60,6 +60,6 @@ The API returns following response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"acknowledged" : true
|
||||
"acknowledged" : true
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -78,19 +78,19 @@ index with several field mappings.
--------------------------------------------------
PUT /publications
{
  "mappings": {
    "properties": {
      "id": { "type": "text" },
      "title": { "type": "text" },
      "abstract": { "type": "text" },
      "author": {
        "properties": {
          "id": { "type": "text" },
          "name": { "type": "text" }
        }
      }
    }
  }
}
--------------------------------------------------

@ -129,7 +129,7 @@ The API returns the following response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"acknowledged" : true,
|
||||
"shards_acknowledged" : true
|
||||
"acknowledged" : true,
|
||||
"shards_acknowledged" : true
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -443,12 +443,12 @@ with `user_id` field values.
|
|||
----
|
||||
POST /users/_doc?refresh=wait_for
|
||||
{
|
||||
"user_id" : 12345
|
||||
"user_id" : 12345
|
||||
}
|
||||
|
||||
POST /users/_doc?refresh=wait_for
|
||||
{
|
||||
"user_id" : 12346
|
||||
"user_id" : 12346
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
|
|
@ -200,27 +200,27 @@ The API returns the following response:
[source,console-response]
--------------------------------------------------
{
  ...
  "_0": {
    ...
    "ram_tree": [
      {
        "description": "postings [PerFieldPostings(format=1)]",
        "size_in_bytes": 2696,
        "children": [
          {
            "description": "format 'Lucene50_0' ...",
            "size_in_bytes": 2608,
            "children" :[ ... ]
          },
          ...
        ]
      },
      ...
    ]
  },
  ...
}
--------------------------------------------------
// TESTRESPONSE[skip:Response is too verbose to be fully shown in documentation, so we just show the relevant bit and don't test the response.]

@ -146,20 +146,20 @@ You can include <<indices-aliases,index aliases>> in an index template.
|
|||
--------------------------------------------------
|
||||
PUT _template/template_1
|
||||
{
|
||||
"index_patterns" : ["te*"],
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
"index_patterns" : ["te*"],
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"aliases" : {
|
||||
"alias1" : {},
|
||||
"alias2" : {
|
||||
"filter" : {
|
||||
"term" : {"user" : "kimchy" }
|
||||
},
|
||||
"routing" : "kimchy"
|
||||
},
|
||||
"aliases" : {
|
||||
"alias1" : {},
|
||||
"alias2" : {
|
||||
"filter" : {
|
||||
"term" : {"user" : "kimchy" }
|
||||
},
|
||||
"routing" : "kimchy"
|
||||
},
|
||||
"{index}-alias" : {} <1>
|
||||
}
|
||||
"{index}-alias" : {} <1>
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -180,26 +180,26 @@ orders overriding them. For example:
|
|||
--------------------------------------------------
|
||||
PUT /_template/template_1
|
||||
{
|
||||
"index_patterns" : ["te*"],
|
||||
"order" : 0,
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"mappings" : {
|
||||
"_source" : { "enabled" : false }
|
||||
}
|
||||
"index_patterns" : ["te*"],
|
||||
"order" : 0,
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"mappings" : {
|
||||
"_source" : { "enabled" : false }
|
||||
}
|
||||
}
|
||||
|
||||
PUT /_template/template_2
|
||||
{
|
||||
"index_patterns" : ["tes*"],
|
||||
"order" : 1,
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"mappings" : {
|
||||
"_source" : { "enabled" : true }
|
||||
}
|
||||
"index_patterns" : ["tes*"],
|
||||
"order" : 1,
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"mappings" : {
|
||||
"_source" : { "enabled" : true }
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -231,12 +231,12 @@ replace the template without specifying one.
|
|||
--------------------------------------------------
|
||||
PUT /_template/template_1
|
||||
{
|
||||
"index_patterns" : ["myindex-*"],
|
||||
"order" : 0,
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"version": 123
|
||||
"index_patterns" : ["myindex-*"],
|
||||
"order" : 0,
|
||||
"settings" : {
|
||||
"number_of_shards" : 1
|
||||
},
|
||||
"version": 123
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -13,9 +13,9 @@ default.
--------------------------------------------------
PUT /twitter/_settings
{
  "index" : {
    "number_of_replicas" : 2
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -79,9 +79,9 @@ To revert a setting to the default value, use `null`. For example:
--------------------------------------------------
PUT /twitter/_settings
{
  "index" : {
    "refresh_interval" : null
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -103,9 +103,9 @@ use:
--------------------------------------------------
PUT /twitter/_settings
{
  "index" : {
    "refresh_interval" : "-1"
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -120,9 +120,9 @@ the defaults for example):
--------------------------------------------------
PUT /twitter/_settings
{
  "index" : {
    "refresh_interval" : "1s"
  }
}
--------------------------------------------------
// TEST[continued]
|
|
@ -14,20 +14,20 @@ Deletes an existing <<enrich-policy,enrich policy>> and its
|
|||
----
|
||||
PUT /users
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"email" : { "type" : "keyword" }
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"email": { "type": "keyword" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": [ "first_name", "last_name", "city", "zip", "state" ]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TESTSETUP
|
||||
|
|
|
@ -101,32 +101,32 @@ The API returns the following response:
|
|||
[source,console-result]
|
||||
----
|
||||
{
|
||||
"executing_policies": [
|
||||
{
|
||||
"name": "my-policy",
|
||||
"task": {
|
||||
"id" : 124,
|
||||
"type" : "direct",
|
||||
"action" : "cluster:admin/xpack/enrich/execute",
|
||||
"start_time_in_millis" : 1458585884904,
|
||||
"running_time_in_nanos" : 47402,
|
||||
"cancellable" : false,
|
||||
"parent_task_id" : "oTUltX4IQMOUUVeiohTt8A:123",
|
||||
"headers" : {
|
||||
"X-Opaque-Id" : "123456"
|
||||
}
|
||||
}
|
||||
"executing_policies": [
|
||||
{
|
||||
"name": "my-policy",
|
||||
"task": {
|
||||
"id": 124,
|
||||
"type": "direct",
|
||||
"action": "cluster:admin/xpack/enrich/execute",
|
||||
"start_time_in_millis": 1458585884904,
|
||||
"running_time_in_nanos": 47402,
|
||||
"cancellable": false,
|
||||
"parent_task_id": "oTUltX4IQMOUUVeiohTt8A:123",
|
||||
"headers": {
|
||||
"X-Opaque-Id": "123456"
|
||||
}
|
||||
],
|
||||
"coordinator_stats": [
|
||||
{
|
||||
"node_id": "1sFM8cmSROZYhPxVsiWew",
|
||||
"queue_size": 0,
|
||||
"remote_requests_current": 0,
|
||||
"remote_requests_total": 0,
|
||||
"executed_searches_total": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"coordinator_stats": [
|
||||
{
|
||||
"node_id": "1sFM8cmSROZYhPxVsiWew",
|
||||
"queue_size": 0,
|
||||
"remote_requests_current": 0,
|
||||
"remote_requests_total": 0,
|
||||
"executed_searches_total": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE[s/"executing_policies": \[[^\]]*\]/"executing_policies": $body.$_path/]
|
||||
|
|
|
@ -14,23 +14,23 @@ Executes an existing <<enrich-policy,enrich policy>>.
|
|||
----
|
||||
PUT /users/_doc/1?refresh
|
||||
{
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"city": "New Orleans",
|
||||
"county": "Orleans",
|
||||
"state": "LA",
|
||||
"zip": 70116,
|
||||
"web": "mardy.asciidocsmith.com"
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"city": "New Orleans",
|
||||
"county": "Orleans",
|
||||
"state": "LA",
|
||||
"zip": 70116,
|
||||
"web": "mardy.asciidocsmith.com"
|
||||
}
|
||||
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TESTSETUP
|
||||
|
|
|
@ -13,29 +13,29 @@ Returns information about an <<enrich-policy,enrich policy>>.
|
|||
----
|
||||
PUT /users
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"email" : { "type" : "keyword" }
|
||||
}
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"email" : { "type" : "keyword" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
|
||||
PUT /_enrich/policy/other-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
////
|
||||
|
@ -96,24 +96,24 @@ The API returns the following response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "my-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name": "my-policy",
|
||||
"indices": [ "users" ],
|
||||
"match_field": "email",
|
||||
"enrich_fields": [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -132,40 +132,40 @@ The API returns the following response:
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "my-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "other-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name": "my-policy",
|
||||
"indices": [ "users" ],
|
||||
"match_field": "email",
|
||||
"enrich_fields": [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name": "other-policy",
|
||||
"indices": [ "users" ],
|
||||
"match_field": "email",
|
||||
"enrich_fields": [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
|
@ -185,40 +185,40 @@ The API returns the following response:
|
|||
[source,console-result]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "my-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name" : "other-policy",
|
||||
"indices" : ["users"],
|
||||
"match_field" : "email",
|
||||
"enrich_fields" : [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
"policies": [
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name": "my-policy",
|
||||
"indices": [ "users" ],
|
||||
"match_field": "email",
|
||||
"enrich_fields": [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"config": {
|
||||
"match": {
|
||||
"name": "other-policy",
|
||||
"indices": [ "users" ],
|
||||
"match_field": "email",
|
||||
"enrich_fields": [
|
||||
"first_name",
|
||||
"last_name",
|
||||
"city",
|
||||
"zip",
|
||||
"state"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -13,11 +13,11 @@ Creates an enrich policy.
|
|||
----
|
||||
PUT /users
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"email" : { "type" : "keyword" }
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"email": { "type": "keyword" }
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
////
|
||||
|
@ -26,11 +26,11 @@ PUT /users
|
|||
----
|
||||
PUT /_enrich/policy/my-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
|
|
@ -243,12 +243,11 @@ following:
|
|||
[source,js]
|
||||
----
|
||||
{
|
||||
"<enrich_policy_type>": {
|
||||
"indices": ["..."],
|
||||
"match_field": "...",
|
||||
"enrich_fields": ["..."],
|
||||
"query": {...}
|
||||
}
|
||||
"<enrich_policy_type>": {
|
||||
"indices": [ "..." ],
|
||||
"match_field": "...",
|
||||
"enrich_fields": [ "..." ],
|
||||
"query": {... }
|
||||
}
|
||||
}
|
||||
----
|
||||
|
@ -321,16 +320,16 @@ containing at least one `geo_shape` field.
|
|||
----
|
||||
PUT /postal_codes
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
},
|
||||
"postal_code": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
},
|
||||
"postal_code": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@ -340,11 +339,11 @@ Use the <<docs-index_,index API>> to index enrich data to this source index.
|
|||
----
|
||||
PUT /postal_codes/_doc/1?refresh=wait_for
|
||||
{
|
||||
"location": {
|
||||
"type": "envelope",
|
||||
"coordinates": [[13.0, 53.0], [14.0, 52.0]]
|
||||
},
|
||||
"postal_code": "96598"
|
||||
"location": {
|
||||
"type": "envelope",
|
||||
"coordinates": [ [ 13.0, 53.0 ], [ 14.0, 52.0 ] ]
|
||||
},
|
||||
"postal_code": "96598"
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
@ -362,11 +361,11 @@ policy with the `geo_match` policy type. This policy must include:
|
|||
----
|
||||
PUT /_enrich/policy/postal_policy
|
||||
{
|
||||
"geo_match": {
|
||||
"indices": "postal_codes",
|
||||
"match_field": "location",
|
||||
"enrich_fields": ["location","postal_code"]
|
||||
}
|
||||
"geo_match": {
|
||||
"indices": "postal_codes",
|
||||
"match_field": "location",
|
||||
"enrich_fields": [ "location", "postal_code" ]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
@ -419,9 +418,9 @@ include the `field` specified in your enrich processor.
|
|||
----
|
||||
PUT /users/_doc/0?pipeline=postal_lookup
|
||||
{
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"geo_location": "POINT (13.5 52.5)"
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"geo_location": "POINT (13.5 52.5)"
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
@ -495,14 +494,14 @@ new document to that index.
|
|||
----
|
||||
PUT /users/_doc/1?refresh=wait_for
|
||||
{
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"city": "New Orleans",
|
||||
"county": "Orleans",
|
||||
"state": "LA",
|
||||
"zip": 70116,
|
||||
"web": "mardy.asciidocsmith.com"
|
||||
"email": "mardy.brown@asciidocsmith.com",
|
||||
"first_name": "Mardy",
|
||||
"last_name": "Brown",
|
||||
"city": "New Orleans",
|
||||
"county": "Orleans",
|
||||
"state": "LA",
|
||||
"zip": 70116,
|
||||
"web": "mardy.asciidocsmith.com"
|
||||
}
|
||||
----
|
||||
|
||||
|
@ -519,11 +518,11 @@ policy type. This policy must include:
|
|||
----
|
||||
PUT /_enrich/policy/users-policy
|
||||
{
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
"match": {
|
||||
"indices": "users",
|
||||
"match_field": "email",
|
||||
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
|
|
@ -35,16 +35,16 @@ PUT circles
|
|||
|
||||
PUT _ingest/pipeline/polygonize_circles
|
||||
{
|
||||
"description": "translate circle to polygon",
|
||||
"processors": [
|
||||
{
|
||||
"circle": {
|
||||
"field": "circle",
|
||||
"error_distance": 28.0,
|
||||
"shape_type": "geo_shape"
|
||||
}
|
||||
"description": "translate circle to polygon",
|
||||
"processors": [
|
||||
{
|
||||
"circle": {
|
||||
"field": "circle",
|
||||
"error_distance": 28.0,
|
||||
"shape_type": "geo_shape"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -230,22 +230,22 @@ PUT my_ip_locations/_doc/1?refresh=true&pipeline=geoip
|
|||
|
||||
GET /my_ip_locations/_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"must" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"filter" : {
|
||||
"geo_distance" : {
|
||||
"distance" : "1m",
|
||||
"geoip.location" : {
|
||||
"lon" : -97.822,
|
||||
"lat" : 37.751
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_distance": {
|
||||
"distance": "1m",
|
||||
"geoip.location": {
|
||||
"lon": -97.822,
|
||||
"lat": 37.751
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
|
|
@ -50,17 +50,17 @@ was provided in the original index request:
--------------------------------------------------
PUT _ingest/pipeline/my_index
{
  "description": "use index:my_index",
  "processors": [
    {
      "script": {
        "source": """
          ctx._index = 'my_index';
          ctx._type = '_doc';
        """
      }
    }
  ]
}
--------------------------------------------------

@ -15,30 +15,30 @@ For example:
|
|||
--------------------------------------------------
|
||||
PUT my_index/_doc/1
|
||||
{
|
||||
"names": [ "John Abraham", "Lincoln Smith"]
|
||||
"names": [ "John Abraham", "Lincoln Smith"]
|
||||
}
|
||||
|
||||
GET my_index/_search
|
||||
{
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"names": {
|
||||
"query": "Abraham Lincoln" <1>
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"names": {
|
||||
"query": "Abraham Lincoln" <1>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GET my_index/_search
|
||||
{
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"names": {
|
||||
"query": "Abraham Lincoln",
|
||||
"slop": 101 <2>
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"names": {
|
||||
"query": "Abraham Lincoln",
|
||||
"slop": 101 <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@ -65,16 +65,16 @@ PUT my_index
|
|||
|
||||
PUT my_index/_doc/1
|
||||
{
|
||||
"names": [ "John Abraham", "Lincoln Smith"]
|
||||
"names": [ "John Abraham", "Lincoln Smith"]
|
||||
}
|
||||
|
||||
GET my_index/_search
|
||||
{
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"names": "Abraham Lincoln" <2>
|
||||
}
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"names": "Abraham Lincoln" <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -212,13 +212,13 @@ the cell right next to it -- even though the shape is very close to the point.
--------------------------------------------------
PUT /example
{
  "mappings": {
    "properties": {
      "location": {
        "type": "geo_shape"
      }
    }
  }
}
--------------------------------------------------
// TESTSETUP

@@ -304,10 +304,10 @@ API. The following is an example of a point in GeoJSON.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "point",
    "coordinates" : [-77.03653, 38.897676]
  }
}
--------------------------------------------------

@@ -317,7 +317,7 @@ The following is an example of a point in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "POINT (-77.03653 38.897676)"
}
--------------------------------------------------

@@ -334,10 +334,10 @@ following is an example of a LineString in GeoJSON.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "linestring",
    "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]]
  }
}
--------------------------------------------------

@@ -347,7 +347,7 @@ The following is an example of a LineString in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "LINESTRING (-77.03653 38.897676, -77.009051 38.889939)"
}
--------------------------------------------------

@@ -366,12 +366,12 @@ closed). The following is an example of a Polygon in GeoJSON.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "coordinates" : [
      [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
    ]
  }
}
--------------------------------------------------

@@ -381,7 +381,7 @@ The following is an example of a Polygon in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))"
}
--------------------------------------------------

@@ -393,13 +393,13 @@ of a polygon with a hole:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "coordinates" : [
      [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
      [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
    ]
  }
}
--------------------------------------------------

@@ -409,7 +409,7 @@ The following is an example of a Polygon with a hole in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))"
}
--------------------------------------------------

@@ -436,13 +436,13 @@ crosses the dateline.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "coordinates" : [
      [ [-177.0, 10.0], [176.0, 15.0], [172.0, 0.0], [176.0, -15.0], [-177.0, -10.0], [-177.0, 10.0] ],
      [ [178.2, 8.2], [-178.8, 8.2], [-180.8, -8.8], [178.2, 8.8] ]
    ]
  }
}
--------------------------------------------------
// TEST[catch:/mapper_parsing_exception/]

@@ -455,13 +455,13 @@ overriding the orientation on a document:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "orientation" : "clockwise",
    "coordinates" : [
      [ [100.0, 0.0], [100.0, 1.0], [101.0, 1.0], [101.0, 0.0], [100.0, 0.0] ]
    ]
  }
}
--------------------------------------------------

@@ -475,12 +475,12 @@ The following is an example of a list of geojson points:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "multipoint",
    "coordinates" : [
      [102.0, 2.0], [103.0, 2.0]
    ]
  }
}
--------------------------------------------------

@@ -490,7 +490,7 @@ The following is an example of a list of WKT points:
--------------------------------------------------
POST /example/_doc
{
  "location" : "MULTIPOINT (102.0 2.0, 103.0 2.0)"
}
--------------------------------------------------

@@ -504,14 +504,14 @@ The following is an example of a list of geojson linestrings:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "multilinestring",
    "coordinates" : [
      [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0] ],
      [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0] ],
      [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8] ]
    ]
  }
}
--------------------------------------------------

@@ -521,7 +521,7 @@ The following is an example of a list of WKT linestrings:
--------------------------------------------------
POST /example/_doc
{
  "location" : "MULTILINESTRING ((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0), (100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8))"
}
--------------------------------------------------

@@ -535,14 +535,14 @@ The following is an example of a list of geojson polygons (second polygon contai
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "multipolygon",
    "coordinates" : [
      [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ],
      [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
        [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ]
    ]
  }
}
--------------------------------------------------

@@ -552,7 +552,7 @@ The following is an example of a list of WKT polygons (second polygon contains a
--------------------------------------------------
POST /example/_doc
{
  "location" : "MULTIPOLYGON (((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0, 102.0 2.0)), ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2)))"
}
--------------------------------------------------

@@ -566,19 +566,19 @@ The following is an example of a collection of geojson geometry objects:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type": "geometrycollection",
    "geometries": [
      {
        "type": "point",
        "coordinates": [100.0, 0.0]
      },
      {
        "type": "linestring",
        "coordinates": [ [101.0, 0.0], [102.0, 1.0] ]
      }
    ]
  }
}
--------------------------------------------------

@@ -588,7 +588,7 @@ The following is an example of a collection of WKT geometry objects:
--------------------------------------------------
POST /example/_doc
{
  "location" : "GEOMETRYCOLLECTION (POINT (100.0 0.0), LINESTRING (101.0 0.0, 102.0 1.0))"
}
--------------------------------------------------

@@ -604,10 +604,10 @@ bounding rectangle in the format `[[minLon, maxLat], [maxLon, minLat]]`:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "envelope",
    "coordinates" : [ [100.0, 1.0], [101.0, 0.0] ]
  }
}
--------------------------------------------------

@@ -619,7 +619,7 @@ The following is an example of an envelope using the WKT BBOX format:
--------------------------------------------------
POST /example/_doc
{
  "location" : "BBOX (100.0, 102.0, 2.0, 0.0)"
}
--------------------------------------------------

@@ -636,11 +636,11 @@ a `POLYGON`.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "circle",
    "coordinates" : [101.0, 1.0],
    "radius" : "100m"
  }
}
--------------------------------------------------
// TEST[skip:not supported in default]
@@ -178,80 +178,80 @@ Will return:
[source,console-result]
--------------------------------------------------
{
  ...,
  "hits": {
    "total": {
      "value": 4,
      "relation": "eq"
    },
    "max_score": null,
    "hits": [
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "1",
        "_score": null,
        "_source": {
          "my_id": "1",
          "text": "This is a question",
          "my_join_field": "question" <1>
        },
        "sort": [
          "1"
        ]
      },
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "2",
        "_score": null,
        "_source": {
          "my_id": "2",
          "text": "This is another question",
          "my_join_field": "question" <2>
        },
        "sort": [
          "2"
        ]
      },
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "3",
        "_score": null,
        "_routing": "1",
        "_source": {
          "my_id": "3",
          "text": "This is an answer",
          "my_join_field": {
            "name": "answer", <3>
            "parent": "1" <4>
          }
        },
        "sort": [
          "3"
        ]
      },
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "4",
        "_score": null,
        "_routing": "1",
        "_source": {
          "my_id": "4",
          "text": "This is another answer",
          "my_join_field": {
            "name": "answer",
            "parent": "1"
          }
        },
        "sort": [
          "4"
        ]
      }
    ]
  }
}
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./"timed_out": false, "took": $body.took, "_shards": $body._shards/]
@@ -20,16 +20,16 @@ If the following mapping configures the `percolator` field type for the
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "properties": {
      "query": {
        "type": "percolator"
      },
      "field": {
        "type": "text"
      }
    }
  }
}
--------------------------------------------------
// TESTSETUP

@@ -40,11 +40,11 @@ Then you can index a query:
--------------------------------------------------
PUT my_index/_doc/match_value
{
  "query": {
    "match": {
      "field": "value"
    }
  }
}
--------------------------------------------------
@@ -77,13 +77,13 @@ for indices created with ElasticSearch 7.5.0 or higher.
--------------------------------------------------
PUT /example
{
  "mappings": {
    "properties": {
      "geometry": {
        "type": "shape"
      }
    }
  }
}
--------------------------------------------------
// TESTSETUP

@@ -141,10 +141,10 @@ following is an example of a point in GeoJSON.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "point",
    "coordinates" : [-377.03653, 389.897676]
  }
}
--------------------------------------------------

@@ -154,7 +154,7 @@ The following is an example of a point in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "POINT (-377.03653 389.897676)"
}
--------------------------------------------------

@@ -171,10 +171,10 @@ following is an example of a LineString in GeoJSON.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "linestring",
    "coordinates" : [[-377.03653, 389.897676], [-377.009051, 389.889939]]
  }
}
--------------------------------------------------

@@ -184,7 +184,7 @@ The following is an example of a LineString in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)"
}
--------------------------------------------------

@@ -200,12 +200,12 @@ closed). The following is an example of a Polygon in GeoJSON.
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "coordinates" : [
      [ [1000.0, -1001.0], [1001.0, -1001.0], [1001.0, -1000.0], [1000.0, -1000.0], [1000.0, -1001.0] ]
    ]
  }
}
--------------------------------------------------

@@ -215,7 +215,7 @@ The following is an example of a Polygon in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "POLYGON ((1000.0 -1001.0, 1001.0 -1001.0, 1001.0 -1000.0, 1000.0 -1000.0, 1000.0 -1001.0))"
}
--------------------------------------------------

@@ -227,13 +227,13 @@ of a polygon with a hole:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "coordinates" : [
      [ [1000.0, -1001.0], [1001.0, -1001.0], [1001.0, -1000.0], [1000.0, -1000.0], [1000.0, -1001.0] ],
      [ [1000.2, -1001.2], [1000.8, -1001.2], [1000.8, -1001.8], [1000.2, -1001.8], [1000.2, -1001.2] ]
    ]
  }
}
--------------------------------------------------

@@ -243,7 +243,7 @@ The following is an example of a Polygon with a hole in WKT:
--------------------------------------------------
POST /example/_doc
{
  "location" : "POLYGON ((1000.0 1000.0, 1001.0 1000.0, 1001.0 1001.0, 1000.0 1001.0, 1000.0 1000.0), (1000.2 1000.2, 1000.8 1000.2, 1000.8 1000.8, 1000.2 1000.8, 1000.2 1000.2))"
}
--------------------------------------------------

@@ -265,13 +265,13 @@ The following is an example of overriding the `orientation` parameters on a docu
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "polygon",
    "orientation" : "clockwise",
    "coordinates" : [
      [ [1000.0, 1000.0], [1000.0, 1001.0], [1001.0, 1001.0], [1001.0, 1000.0], [1000.0, 1000.0] ]
    ]
  }
}
--------------------------------------------------

@@ -285,12 +285,12 @@ The following is an example of a list of geojson points:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "multipoint",
    "coordinates" : [
      [1002.0, 1002.0], [1003.0, 2000.0]
    ]
  }
}
--------------------------------------------------

@@ -300,7 +300,7 @@ The following is an example of a list of WKT points:
--------------------------------------------------
POST /example/_doc
{
  "location" : "MULTIPOINT (1002.0 2000.0, 1003.0 2000.0)"
}
--------------------------------------------------

@@ -314,14 +314,14 @@ The following is an example of a list of geojson linestrings:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "multilinestring",
    "coordinates" : [
      [ [1002.0, 200.0], [1003.0, 200.0], [1003.0, 300.0], [1002.0, 300.0] ],
      [ [1000.0, 100.0], [1001.0, 100.0], [1001.0, 100.0], [1000.0, 100.0] ],
      [ [1000.2, 100.2], [1000.8, 100.2], [1000.8, 100.8], [1000.2, 100.8] ]
    ]
  }
}
--------------------------------------------------

@@ -331,7 +331,7 @@ The following is an example of a list of WKT linestrings:
--------------------------------------------------
POST /example/_doc
{
  "location" : "MULTILINESTRING ((1002.0 200.0, 1003.0 200.0, 1003.0 300.0, 1002.0 300.0), (1000.0 100.0, 1001.0 100.0, 1001.0 100.0, 1000.0 100.0), (1000.2 0.2, 1000.8 100.2, 1000.8 100.8, 1000.2 100.8))"
}
--------------------------------------------------

@@ -345,14 +345,14 @@ The following is an example of a list of geojson polygons (second polygon contai
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "multipolygon",
    "coordinates" : [
      [ [[1002.0, 200.0], [1003.0, 200.0], [1003.0, 300.0], [1002.0, 300.0], [1002.0, 200.0]] ],
      [ [[1000.0, 200.0], [1001.0, 100.0], [1001.0, 100.0], [1000.0, 100.0], [1000.0, 100.0]],
        [[1000.2, 200.2], [1000.8, 100.2], [1000.8, 100.8], [1000.2, 100.8], [1000.2, 100.2]] ]
    ]
  }
}
--------------------------------------------------

@@ -362,7 +362,7 @@ The following is an example of a list of WKT polygons (second polygon contains a
--------------------------------------------------
POST /example/_doc
{
  "location" : "MULTIPOLYGON (((1002.0 200.0, 1003.0 200.0, 1003.0 300.0, 1002.0 300.0, 102.0 200.0)), ((1000.0 100.0, 1001.0 100.0, 1001.0 100.0, 1000.0 100.0, 1000.0 100.0), (1000.2 100.2, 1000.8 100.2, 1000.8 100.8, 1000.2 100.8, 1000.2 100.2)))"
}
--------------------------------------------------

@@ -376,19 +376,19 @@ The following is an example of a collection of geojson geometry objects:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type": "geometrycollection",
    "geometries": [
      {
        "type": "point",
        "coordinates": [1000.0, 100.0]
      },
      {
        "type": "linestring",
        "coordinates": [ [1001.0, 100.0], [1002.0, 100.0] ]
      }
    ]
  }
}
--------------------------------------------------

@@ -398,7 +398,7 @@ The following is an example of a collection of WKT geometry objects:
--------------------------------------------------
POST /example/_doc
{
  "location" : "GEOMETRYCOLLECTION (POINT (1000.0 100.0), LINESTRING (1001.0 100.0, 1002.0 100.0))"
}
--------------------------------------------------

@@ -413,10 +413,10 @@ bounding rectangle in the format `[[minX, maxY], [maxX, minY]]`:
--------------------------------------------------
POST /example/_doc
{
  "location" : {
    "type" : "envelope",
    "coordinates" : [ [1000.0, 100.0], [1001.0, 100.0] ]
  }
}
--------------------------------------------------

@@ -428,7 +428,7 @@ The following is an example of an envelope using the WKT BBOX format:
--------------------------------------------------
POST /example/_doc
{
  "location" : "BBOX (1000.0, 1002.0, 2000.0, 1000.0)"
}
--------------------------------------------------
@@ -60,26 +60,26 @@ no effect on the estimation. +
--------------------------------------------------
POST _ml/anomaly_detectors/_estimate_model_memory
{
  "analysis_config": {
    "bucket_span": "5m",
    "detectors": [
      {
        "function": "sum",
        "field_name": "bytes",
        "by_field_name": "status",
        "partition_field_name": "app"
      }
    ],
    "influencers": [ "source_ip", "dest_ip" ]
  },
  "overall_cardinality": {
    "status": 10,
    "app": 50
  },
  "max_bucket_cardinality": {
    "source_ip": 300,
    "dest_ip": 30
  }
}
--------------------------------------------------
// TEST[skip:needs-licence]
@@ -41,20 +41,20 @@ The following example validates job configuration information:
--------------------------------------------------
POST _ml/anomaly_detectors/_validate
{
  "description": "Unusual response times by airlines",
  "analysis_config": {
    "bucket_span": "300S",
    "detectors": [
      {
        "function": "metric",
        "field_name": "responsetime",
        "by_field_name": "airline" } ],
    "influencers": [ "airline" ]
  },
  "data_description": {
    "time_field": "time",
    "time_format": "yyyy-MM-dd'T'HH:mm:ssX"
  }
}
--------------------------------------------------
// TEST[skip:needs-licence]
@@ -189,27 +189,27 @@ The API returns the following results:
[source,console-result]
----
{
  "count": 1,
  "data_frame_analytics": [
    {
      "id": "loganalytics",
      "source": {
        "index": "logdata",
        "query": {
          "match_all": {}
        }
      },
      "dest": {
        "index": "logdata_out",
        "results_field": "ml"
      },
      "analysis": {
        "outlier_detection": {}
      },
      "model_memory_limit": "1gb",
      "create_time": 1562265491319,
      "version": "8.0.0"
    }
  ]
}
----
@@ -494,30 +494,30 @@ The API returns the following result:
[source,console-result]
----
{
  "id": "loganalytics",
  "description": "Outlier detection on log data",
  "source": {
    "index": ["logdata"],
    "query": {
      "match_all": {}
    }
  },
  "dest": {
    "index": "logdata_out",
    "results_field": "ml"
  },
  "analysis": {
    "outlier_detection": {
      "compute_feature_influence": true,
      "outlier_fraction": 0.05,
      "standardization_enabled": true
    }
  },
  "model_memory_limit": "1gb",
  "create_time" : 1562265491319,
  "version" : "7.6.0",
  "allow_lazy_start" : false,
  "max_num_threads": 1
}
----
// TESTRESPONSE[s/1562265491319/$body.$_path/]
@@ -29,12 +29,12 @@ and the number of replicas to two.
----------------------------------
PUT /_template/custom_monitoring
{
  "index_patterns": ".monitoring-*",
  "order": 1,
  "settings": {
    "number_of_shards": 5,
    "number_of_replicas": 2
  }
}
----------------------------------
@@ -40,12 +40,12 @@ Guidelines
----
GET _search
{
  "query": {
    "sample": {
      "foo": "baz",
      "bar": true
    }
  }
}
----
// TEST[skip: REMOVE THIS COMMENT.]

@@ -107,12 +107,12 @@ For example:
----
GET my_time_series_index/_search
{
  "query": {
    "sample": {
      "foo": "baz",
      "bar": false
    }
  }
}
----
// TEST[skip: REMOVE THIS COMMENT.]
@@ -18,21 +18,21 @@ excluding them from the search results.
----
GET /_search
{
  "query": {
    "boosting": {
      "positive": {
        "term": {
          "text": "apple"
        }
      },
      "negative": {
        "term": {
          "text": "pie tart fruit crumble tree"
        }
      },
      "negative_boost": 0.5
    }
  }
}
----
@@ -12,14 +12,14 @@ parameter value.
----
GET /_search
{
  "query": {
    "constant_score": {
      "filter": {
        "term": { "user": "kimchy" }
      },
      "boost": 1.2
    }
  }
}
----
@@ -21,15 +21,15 @@ You can use the `dis_max` to search for a term in fields mapped with different
----
GET /_search
{
  "query": {
    "dis_max": {
      "queries": [
        { "term": { "title": "Quick pets" } },
        { "term": { "body": "Quick pets" } }
      ],
      "tie_breaker": 0.7
    }
  }
}
----
@@ -20,11 +20,11 @@ An indexed value may not exist for a document's field due to a variety of reason
----
GET /_search
{
  "query": {
    "exists": {
      "field": "user"
    }
  }
}
----

@@ -56,14 +56,14 @@ the `user` field.
----
GET /_search
{
  "query": {
    "bool": {
      "must_not": {
        "exists": {
          "field": "user"
        }
      }
    }
  }
}
----
@@ -19,14 +19,14 @@ by the query.
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": { "match_all": {} },
      "boost": "5",
      "random_score": {}, <1>
      "boost_mode": "multiply"
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -41,27 +41,27 @@ given filtering query
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": { "match_all": {} },
      "boost": "5", <1>
      "functions": [
        {
          "filter": { "match": { "test": "bar" } },
          "random_score": {}, <2>
          "weight": 23
        },
        {
          "filter": { "match": { "test": "cat" } },
          "weight": 42
        }
      ],
      "max_boost": 42,
      "score_mode": "max",
      "boost_mode": "multiply",
      "min_score": 42
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -137,18 +137,18 @@ simple sample:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": {
        "match": { "message": "elasticsearch" }
      },
      "script_score": {
        "script": {
          "source": "Math.log(2 + doc['likes'].value)"
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -175,22 +175,22 @@ same script, and provide parameters to it:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": {
        "match": { "message": "elasticsearch" }
      },
      "script_score": {
        "script": {
          "params": {
            "a": 5,
            "b": 1.2
          },
          "source": "params.a / Math.pow(params.b, doc['likes'].value)"
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -241,14 +241,14 @@ a lot of memory.
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "random_score": {
        "seed": 10,
        "field": "_seq_no"
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -269,16 +269,16 @@ doing so would look like:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "field_value_factor": {
        "field": "likes",
        "factor": 1.2,
        "modifier": "sqrt",
        "missing": 1
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -380,18 +380,18 @@ days, weeks, and so on. Example:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "gauss": {
        "date": {
          "origin": "2013-09-17", <1>
          "scale": "10d",
          "offset": "5d", <2>
          "decay": 0.5 <2>
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]

@@ -578,34 +578,34 @@ the request would look like this:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "functions": [
        {
          "gauss": {
            "price": {
              "origin": "0",
              "scale": "20"
            }
          }
        },
        {
          "gauss": {
            "location": {
              "origin": "11, 12",
              "scale": "2km"
            }
          }
        }
      ],
      "query": {
        "match": {
          "properties": "balcony"
        }
      },
      "score_mode": "multiply"
    }
  }
}
--------------------------------------------------
@@ -29,13 +29,13 @@ The query then returns exact matches for each expansion.
----
GET /_search
{
  "query": {
    "fuzzy": {
      "user": {
        "value": "ki"
      }
    }
  }
}
----

@@ -46,18 +46,18 @@ GET /_search
----
GET /_search
{
  "query": {
    "fuzzy": {
      "user": {
        "value": "ki",
        "fuzziness": "AUTO",
        "max_expansions": 50,
        "prefix_length": 0,
        "transpositions": true,
        "rewrite": "constant_score"
      }
    }
  }
}
----
@@ -11,27 +11,27 @@ bounding box. Assuming the following indexed document:
--------------------------------------------------
PUT /my_locations
{
  "mappings": {
    "properties": {
      "pin": {
        "properties": {
          "location": {
            "type": "geo_point"
          }
        }
      }
    }
  }
}

PUT /my_locations/_doc/1
{
  "pin": {
    "location": {
      "lat": 40.12,
      "lon": -71.34
    }
  }
}
--------------------------------------------------
// TESTSETUP

@@ -43,27 +43,27 @@ Then the following simple query can be executed with a
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": {
              "lat": 40.73,
              "lon": -74.1
            },
            "bottom_right": {
              "lat": 40.01,
              "lon": -71.12
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -98,27 +98,27 @@ representations of the geo point, the filter can accept it as well:
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": {
              "lat": 40.73,
              "lon": -74.1
            },
            "bottom_right": {
              "lat": 40.01,
              "lon": -71.12
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -132,21 +132,21 @@ conform with http://geojson.org/[GeoJSON].
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": [ -74.1, 40.73 ],
            "bottom_right": [ -71.12, 40.01 ]
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -159,21 +159,21 @@ Format in `lat,lon`.
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": "40.73, -74.1",
            "bottom_right": "40.01, -71.12"
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -184,20 +184,20 @@ GET my_locations/_search
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "wkt": "BBOX (-74.1, -71.12, 40.73, 40.01)"
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -208,21 +208,21 @@ GET my_locations/_search
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": "dr5r9ydj2y73",
            "bottom_right": "drj7teegpus6"
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -242,14 +242,14 @@ geohash the geohash can be specified in both `top_left` and
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "geo_bounding_box": {
      "pin.location": {
        "top_left": "dr",
        "bottom_right": "dr"
      }
    }
  }
}
--------------------------------------------------

@@ -271,23 +271,23 @@ values separately.
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top": 40.73,
            "left": -74.1,
            "bottom": 40.01,
            "right": -71.12
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -320,28 +320,28 @@ are not supported. Here is an example:
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": {
              "lat": 40.73,
              "lon": -74.1
            },
            "bottom_right": {
              "lat": 40.10,
              "lon": -71.12
            }
          },
          "type": "indexed"
        }
      }
    }
  }
}
--------------------------------------------------
@@ -12,27 +12,27 @@ document:
--------------------------------------------------
PUT /my_locations
{
  "mappings": {
    "properties": {
      "pin": {
        "properties": {
          "location": {
            "type": "geo_point"
          }
        }
      }
    }
  }
}

PUT /my_locations/_doc/1
{
  "pin": {
    "location": {
      "lat": 40.12,
      "lon": -71.34
    }
  }
}
--------------------------------------------------
// TESTSETUP

@@ -45,22 +45,22 @@ filter:
--------------------------------------------------
GET /my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_distance": {
          "distance": "200km",
          "pin.location": {
            "lat": 40,
            "lon": -70
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -77,22 +77,22 @@ representations of the geo point, the filter can accept it as well:
--------------------------------------------------
GET /my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_distance": {
          "distance": "12km",
          "pin.location": {
            "lat": 40,
            "lon": -70
          }
        }
      }
    }
  }
}
--------------------------------------------------

@@ -106,19 +106,19 @@ conform with http://geojson.org/[GeoJSON].
--------------------------------------------------
GET /my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_distance": {
          "distance": "12km",
          "pin.location": [ -70, 40 ]
        }
      }
    }
  }
}
--------------------------------------------------

@@ -132,19 +132,19 @@ Format in `lat,lon`.
--------------------------------------------------
GET /my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_distance": {
          "distance": "12km",
          "pin.location": "40,-70"
        }
      }
    }
  }
}
--------------------------------------------------

@@ -155,19 +155,19 @@ GET /my_locations/_search
--------------------------------------------------
GET /my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_distance": {
          "distance": "12km",
          "pin.location": "drm3btev3e86"
        }
      }
    }
  }
}
--------------------------------------------------
@ -11,24 +11,24 @@ points. Here is an example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"must" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"filter" : {
|
||||
"geo_polygon" : {
|
||||
"person.location" : {
|
||||
"points" : [
|
||||
{"lat" : 40, "lon" : -70},
|
||||
{"lat" : 30, "lon" : -80},
|
||||
{"lat" : 20, "lon" : -90}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_polygon": {
|
||||
"person.location": {
|
||||
"points": [
|
||||
{ "lat": 40, "lon": -70 },
|
||||
{ "lat": 30, "lon": -80 },
|
||||
{ "lat": 20, "lon": -90 }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -60,24 +60,24 @@ conform with http://geojson.org/[GeoJSON].
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"must" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"filter" : {
|
||||
"geo_polygon" : {
|
||||
"person.location" : {
|
||||
"points" : [
|
||||
[-70, 40],
|
||||
[-80, 30],
|
||||
[-90, 20]
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_polygon": {
|
||||
"person.location": {
|
||||
"points": [
|
||||
[ -70, 40 ],
|
||||
[ -80, 30 ],
|
||||
[ -90, 20 ]
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -90,24 +90,24 @@ Format in `lat,lon`.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"must" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"filter" : {
|
||||
"geo_polygon" : {
|
||||
"person.location" : {
|
||||
"points" : [
|
||||
"40, -70",
|
||||
"30, -80",
|
||||
"20, -90"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_polygon": {
|
||||
"person.location": {
|
||||
"points": [
|
||||
"40, -70",
|
||||
"30, -80",
|
||||
"20, -90"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -118,24 +118,24 @@ GET /_search
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"must" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"filter" : {
|
||||
"geo_polygon" : {
|
||||
"person.location" : {
|
||||
"points" : [
|
||||
"drn5x1g8cu2y",
|
||||
"30, -80",
|
||||
"20, -90"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_polygon": {
|
||||
"person.location": {
|
||||
"points": [
|
||||
"drn5x1g8cu2y",
|
||||
"30, -80",
|
||||
"20, -90"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -31,22 +31,22 @@ Given the following index with locations as `geo_shape` fields:
|
|||
--------------------------------------------------
|
||||
PUT /example
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
POST /example/_doc?refresh
|
||||
{
|
||||
"name": "Wind & Wetter, Berlin, Germany",
|
||||
"location": {
|
||||
"type": "point",
|
||||
"coordinates": [13.400544, 52.530286]
|
||||
}
|
||||
"name": "Wind & Wetter, Berlin, Germany",
|
||||
"location": {
|
||||
"type": "point",
|
||||
"coordinates": [ 13.400544, 52.530286 ]
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTSETUP
|
||||
|
@@ -59,24 +59,24 @@ extension:
|
|||
--------------------------------------------------
|
||||
GET /example/_search
|
||||
{
|
||||
"query":{
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_shape": {
|
||||
"location": {
|
||||
"shape": {
|
||||
"type": "envelope",
|
||||
"coordinates": [ [ 13.0, 53.0 ], [ 14.0, 52.0 ] ]
|
||||
},
|
||||
"filter": {
|
||||
"geo_shape": {
|
||||
"location": {
|
||||
"shape": {
|
||||
"type": "envelope",
|
||||
"coordinates" : [[13.0, 53.0], [14.0, 52.0]]
|
||||
},
|
||||
"relation": "within"
|
||||
}
|
||||
}
|
||||
}
|
||||
"relation": "within"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -87,19 +87,19 @@ The above query can, similarly, be queried on `geo_point` fields.
|
|||
--------------------------------------------------
|
||||
PUT /example_points
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_point"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_point"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /example_points/_doc/1?refresh
|
||||
{
|
||||
"name": "Wind & Wetter, Berlin, Germany",
|
||||
"location": [13.400544, 52.530286]
|
||||
"name": "Wind & Wetter, Berlin, Germany",
|
||||
"location": [13.400544, 52.530286]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -112,24 +112,24 @@ returned.
|
|||
--------------------------------------------------
|
||||
GET /example_points/_search
|
||||
{
|
||||
"query":{
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": {
|
||||
"match_all": {}
|
||||
},
|
||||
"filter": {
|
||||
"geo_shape": {
|
||||
"location": {
|
||||
"shape": {
|
||||
"type": "envelope",
|
||||
"coordinates": [ [ 13.0, 53.0 ], [ 14.0, 52.0 ] ]
|
||||
},
|
||||
"filter": {
|
||||
"geo_shape": {
|
||||
"location": {
|
||||
"shape": {
|
||||
"type": "envelope",
|
||||
"coordinates" : [[13.0, 53.0], [14.0, 52.0]]
|
||||
},
|
||||
"relation": "intersects"
|
||||
}
|
||||
}
|
||||
}
|
||||
"relation": "intersects"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -191,40 +191,40 @@ shape:
|
|||
--------------------------------------------------
|
||||
PUT /shapes
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "geo_shape"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /shapes/_doc/deu
|
||||
{
|
||||
"location": {
|
||||
"type": "envelope",
|
||||
"coordinates" : [[13.0, 53.0], [14.0, 52.0]]
|
||||
}
|
||||
"location": {
|
||||
"type": "envelope",
|
||||
"coordinates" : [[13.0, 53.0], [14.0, 52.0]]
|
||||
}
|
||||
}
|
||||
|
||||
GET /example/_search
|
||||
{
|
||||
"query": {
|
||||
"bool": {
|
||||
"filter": {
|
||||
"geo_shape": {
|
||||
"location": {
|
||||
"indexed_shape": {
|
||||
"index": "shapes",
|
||||
"id": "deu",
|
||||
"path": "location"
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"filter": {
|
||||
"geo_shape": {
|
||||
"location": {
|
||||
"indexed_shape": {
|
||||
"index": "shapes",
|
||||
"id": "deu",
|
||||
"path": "location"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -31,16 +31,16 @@ field mapping. For example:
|
|||
----
|
||||
PUT /my_index
|
||||
{
|
||||
"mappings": {
|
||||
"properties" : {
|
||||
"my-join-field" : {
|
||||
"type" : "join",
|
||||
"relations": {
|
||||
"parent": "child"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"my-join-field": {
|
||||
"type": "join",
|
||||
"relations": {
|
||||
"parent": "child"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
|
@@ -53,17 +53,17 @@ PUT /my_index
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"has_child" : {
|
||||
"type" : "child",
|
||||
"query" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"max_children": 10,
|
||||
"min_children": 2,
|
||||
"score_mode" : "min"
|
||||
}
|
||||
"query": {
|
||||
"has_child": {
|
||||
"type": "child",
|
||||
"query": {
|
||||
"match_all": {}
|
||||
},
|
||||
"max_children": 10,
|
||||
"min_children": 2,
|
||||
"score_mode": "min"
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -141,18 +141,18 @@ sorts returned documents by the `click_count` field of their child documents.
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"has_child" : {
|
||||
"type" : "child",
|
||||
"query" : {
|
||||
"function_score" : {
|
||||
"script_score": {
|
||||
"script": "_score * doc['click_count'].value"
|
||||
}
|
||||
}
|
||||
},
|
||||
"score_mode" : "max"
|
||||
"query": {
|
||||
"has_child": {
|
||||
"type": "child",
|
||||
"query": {
|
||||
"function_score": {
|
||||
"script_score": {
|
||||
"script": "_score * doc['click_count'].value"
|
||||
}
|
||||
}
|
||||
},
|
||||
"score_mode": "max"
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
|
|
@@ -27,19 +27,19 @@ field mapping. For example:
|
|||
----
|
||||
PUT /my-index
|
||||
{
|
||||
"mappings": {
|
||||
"properties" : {
|
||||
"my-join-field" : {
|
||||
"type" : "join",
|
||||
"relations": {
|
||||
"parent": "child"
|
||||
}
|
||||
},
|
||||
"tag" : {
|
||||
"type" : "keyword"
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"my-join-field": {
|
||||
"type": "join",
|
||||
"relations": {
|
||||
"parent": "child"
|
||||
}
|
||||
},
|
||||
"tag": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
|
@@ -52,18 +52,18 @@ PUT /my-index
|
|||
----
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query": {
|
||||
"has_parent" : {
|
||||
"parent_type" : "parent",
|
||||
"query" : {
|
||||
"term" : {
|
||||
"tag" : {
|
||||
"value" : "Elasticsearch"
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"has_parent": {
|
||||
"parent_type": "parent",
|
||||
"query": {
|
||||
"term": {
|
||||
"tag": {
|
||||
"value": "Elasticsearch"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -122,18 +122,18 @@ sorts returned documents by the `view_count` field of their parent documents.
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"has_parent" : {
|
||||
"parent_type" : "parent",
|
||||
"score" : true,
|
||||
"query" : {
|
||||
"function_score" : {
|
||||
"script_score": {
|
||||
"script": "_score * doc['view_count'].value"
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"has_parent": {
|
||||
"parent_type": "parent",
|
||||
"score": true,
|
||||
"query": {
|
||||
"function_score": {
|
||||
"script_score": {
|
||||
"script": "_score * doc['view_count'].value"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
|
|
@@ -13,11 +13,11 @@ the <<mapping-id-field,`_id`>> field.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"ids" : {
|
||||
"values" : ["1", "4", "100"]
|
||||
}
|
||||
"query": {
|
||||
"ids" : {
|
||||
"values" : ["1", "4", "100"]
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -23,9 +23,9 @@ The `_score` can be changed with the `boost` parameter:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_all": { "boost" : 1.2 }
|
||||
}
|
||||
"query": {
|
||||
"match_all": { "boost" : 1.2 }
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -39,8 +39,8 @@ This is the inverse of the `match_all` query, which matches no documents.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_none": {}
|
||||
}
|
||||
"query": {
|
||||
"match_none": {}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
|
|
@@ -13,11 +13,11 @@ is used in a `term` query. The last term is used in a `prefix` query. A
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_bool_prefix" : {
|
||||
"message" : "quick brown f"
|
||||
}
|
||||
"query": {
|
||||
"match_bool_prefix" : {
|
||||
"message" : "quick brown f"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -28,15 +28,15 @@ following `bool` query
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"should": [
|
||||
{ "term": { "message": "quick" }},
|
||||
{ "term": { "message": "brown" }},
|
||||
{ "prefix": { "message": "f"}}
|
||||
]
|
||||
}
|
||||
"query": {
|
||||
"bool" : {
|
||||
"should": [
|
||||
{ "term": { "message": "quick" }},
|
||||
{ "term": { "message": "brown" }},
|
||||
{ "prefix": { "message": "f"}}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -59,14 +59,14 @@ configured with the `analyzer` parameter
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_bool_prefix" : {
|
||||
"message": {
|
||||
"query": "quick brown f",
|
||||
"analyzer": "keyword"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match_bool_prefix": {
|
||||
"message": {
|
||||
"query": "quick brown f",
|
||||
"analyzer": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -22,13 +22,13 @@ brown ferrets` but not `the fox is quick and brown`.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_phrase_prefix" : {
|
||||
"message" : {
|
||||
"query" : "quick brown f"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match_phrase_prefix": {
|
||||
"message": {
|
||||
"query": "quick brown f"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -11,11 +11,11 @@ out of the analyzed text. For example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_phrase" : {
|
||||
"message" : "this is a test"
|
||||
}
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"message": "this is a test"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -30,14 +30,14 @@ definition, or the default search analyzer, for example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match_phrase" : {
|
||||
"message" : {
|
||||
"query" : "this is a test",
|
||||
"analyzer" : "my_analyzer"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match_phrase": {
|
||||
"message": {
|
||||
"query": "this is a test",
|
||||
"analyzer": "my_analyzer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -18,13 +18,13 @@ including options for fuzzy matching.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match" : {
|
||||
"message" : {
|
||||
"query" : "this is a test"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": {
|
||||
"query": "this is a test"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -150,11 +150,11 @@ parameters. For example:
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match" : {
|
||||
"message" : "this is a test"
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": "this is a test"
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -175,14 +175,14 @@ Here is an example with the `operator` parameter:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match" : {
|
||||
"message" : {
|
||||
"query" : "this is a test",
|
||||
"operator" : "and"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": {
|
||||
"query": "this is a test",
|
||||
"operator": "and"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -219,14 +219,14 @@ which does not support fuzzy expansion.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match" : {
|
||||
"message" : {
|
||||
"query" : "this is a testt",
|
||||
"fuzziness": "AUTO"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": {
|
||||
"query": "this is a testt",
|
||||
"fuzziness": "AUTO"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -241,15 +241,15 @@ change that the `zero_terms_query` option can be used, which accepts
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"match" : {
|
||||
"message" : {
|
||||
"query" : "to be or not to be",
|
||||
"operator" : "and",
|
||||
"zero_terms_query": "all"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": {
|
||||
"query": "to be or not to be",
|
||||
"operator": "and",
|
||||
"zero_terms_query": "all"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -19,14 +19,14 @@ fields, limiting the number of selected terms to 12.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"more_like_this" : {
|
||||
"fields" : ["title", "description"],
|
||||
"like" : "Once upon a time",
|
||||
"min_term_freq" : 1,
|
||||
"max_query_terms" : 12
|
||||
}
|
||||
"query": {
|
||||
"more_like_this" : {
|
||||
"fields" : ["title", "description"],
|
||||
"like" : "Once upon a time",
|
||||
"min_term_freq" : 1,
|
||||
"max_query_terms" : 12
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -38,24 +38,24 @@ similar to the one used in the <<docs-multi-get,Multi GET API>>.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"more_like_this" : {
|
||||
"fields" : ["title", "description"],
|
||||
"like" : [
|
||||
{
|
||||
"_index" : "imdb",
|
||||
"_id" : "1"
|
||||
},
|
||||
{
|
||||
"_index" : "imdb",
|
||||
"_id" : "2"
|
||||
},
|
||||
"and potentially some more text here as well"
|
||||
],
|
||||
"min_term_freq" : 1,
|
||||
"max_query_terms" : 12
|
||||
}
|
||||
"query": {
|
||||
"more_like_this": {
|
||||
"fields": [ "title", "description" ],
|
||||
"like": [
|
||||
{
|
||||
"_index": "imdb",
|
||||
"_id": "1"
|
||||
},
|
||||
{
|
||||
"_index": "imdb",
|
||||
"_id": "2"
|
||||
},
|
||||
"and potentially some more text here as well"
|
||||
],
|
||||
"min_term_freq": 1,
|
||||
"max_query_terms": 12
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -67,29 +67,29 @@ present in the index, the syntax is similar to <<docs-termvectors-artificial-doc
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"more_like_this" : {
|
||||
"fields" : ["name.first", "name.last"],
|
||||
"like" : [
|
||||
{
|
||||
"_index" : "marvel",
|
||||
"doc" : {
|
||||
"name": {
|
||||
"first": "Ben",
|
||||
"last": "Grimm"
|
||||
},
|
||||
"_doc": "You got no idea what I'd... what I'd give to be invisible."
|
||||
}
|
||||
"query": {
|
||||
"more_like_this": {
|
||||
"fields": [ "name.first", "name.last" ],
|
||||
"like": [
|
||||
{
|
||||
"_index": "marvel",
|
||||
"doc": {
|
||||
"name": {
|
||||
"first": "Ben",
|
||||
"last": "Grimm"
|
||||
},
|
||||
{
|
||||
"_index" : "marvel",
|
||||
"_id" : "2"
|
||||
}
|
||||
],
|
||||
"min_term_freq" : 1,
|
||||
"max_query_terms" : 12
|
||||
"_doc": "You got no idea what I'd... what I'd give to be invisible."
|
||||
}
|
||||
},
|
||||
{
|
||||
"_index": "marvel",
|
||||
"_id": "2"
|
||||
}
|
||||
],
|
||||
"min_term_freq": 1,
|
||||
"max_query_terms": 12
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -121,27 +121,27 @@ default, but there will be no speed up on analysis for these fields.
|
|||
--------------------------------------------------
|
||||
PUT /imdb
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "text",
|
||||
"term_vector": "yes"
|
||||
},
|
||||
"description": {
|
||||
"type": "text"
|
||||
},
|
||||
"tags": {
|
||||
"type": "text",
|
||||
"fields" : {
|
||||
"raw": {
|
||||
"type" : "text",
|
||||
"analyzer": "keyword",
|
||||
"term_vector" : "yes"
|
||||
}
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "text",
|
||||
"term_vector": "yes"
|
||||
},
|
||||
"description": {
|
||||
"type": "text"
|
||||
},
|
||||
"tags": {
|
||||
"type": "text",
|
||||
"fields": {
|
||||
"raw": {
|
||||
"type": "text",
|
||||
"analyzer": "keyword",
|
||||
"term_vector": "yes"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -23,13 +23,13 @@ mapping. For example:
|
|||
----
|
||||
PUT /my_index
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"obj1" : {
|
||||
"type" : "nested"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"obj1": {
|
||||
"type": "nested"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
|
@@ -41,20 +41,20 @@ PUT /my_index
|
|||
----
|
||||
GET /my_index/_search
|
||||
{
|
||||
"query": {
|
||||
"nested" : {
|
||||
"path" : "obj1",
|
||||
"query" : {
|
||||
"bool" : {
|
||||
"must" : [
|
||||
{ "match" : {"obj1.name" : "blue"} },
|
||||
{ "range" : {"obj1.count" : {"gt" : 5}} }
|
||||
]
|
||||
}
|
||||
},
|
||||
"score_mode" : "avg"
|
||||
"query": {
|
||||
"nested": {
|
||||
"path": "obj1",
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": [
|
||||
{ "match": { "obj1.name": "blue" } },
|
||||
{ "range": { "obj1.count": { "gt": 5 } } }
|
||||
]
|
||||
}
|
||||
},
|
||||
"score_mode": "avg"
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
@@ -133,29 +133,29 @@ with nested `make` and `model` fields.
|
|||
----
|
||||
PUT /drivers
|
||||
{
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"driver" : {
|
||||
"type" : "nested",
|
||||
"properties" : {
|
||||
"last_name" : {
|
||||
"type" : "text"
|
||||
},
|
||||
"vehicle" : {
|
||||
"type" : "nested",
|
||||
"properties" : {
|
||||
"make" : {
|
||||
"type" : "text"
|
||||
},
|
||||
"model" : {
|
||||
"type" : "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"driver": {
|
||||
"type": "nested",
|
||||
"properties": {
|
||||
"last_name": {
|
||||
"type": "text"
|
||||
},
|
||||
"vehicle": {
|
||||
"type": "nested",
|
||||
"properties": {
|
||||
"make": {
|
||||
"type": "text"
|
||||
},
|
||||
"model": {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -206,24 +206,24 @@ to match documents based on the `make` and `model` fields.
|
|||
----
|
||||
GET /drivers/_search
|
||||
{
|
||||
"query" : {
|
||||
"nested" : {
|
||||
"path" : "driver",
|
||||
"query" : {
|
||||
"nested" : {
|
||||
"path" : "driver.vehicle",
|
||||
"query" : {
|
||||
"bool" : {
|
||||
"must" : [
|
||||
{ "match" : { "driver.vehicle.make" : "Powell Motors" } },
|
||||
{ "match" : { "driver.vehicle.model" : "Canyonero" } }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"nested": {
|
||||
"path": "driver",
|
||||
"query": {
|
||||
"nested": {
|
||||
"path": "driver.vehicle",
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": [
|
||||
{ "match": { "driver.vehicle.make": "Powell Motors" } },
|
||||
{ "match": { "driver.vehicle.model": "Canyonero" } }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
// TEST[continued]
|
||||
|
|
|
@@ -24,16 +24,16 @@ the following example.
|
|||
----
|
||||
PUT /my-index
|
||||
{
|
||||
"mappings": {
|
||||
"properties" : {
|
||||
"my-join-field" : {
|
||||
"type" : "join",
|
||||
"relations": {
|
||||
"my-parent": "my-child"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"my-join-field": {
|
||||
"type": "join",
|
||||
"relations": {
|
||||
"my-parent": "my-child"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
|
|
|
@@ -18,16 +18,16 @@ Create an index with two fields:
|
|||
--------------------------------------------------
|
||||
PUT /my-index
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"message": {
|
||||
"type": "text"
|
||||
},
|
||||
"query": {
|
||||
"type": "percolator"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"message": {
|
||||
"type": "text"
|
||||
},
|
||||
"query": {
|
||||
"type": "percolator"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -46,11 +46,11 @@ Register a query in the percolator:
|
|||
--------------------------------------------------
|
||||
PUT /my-index/_doc/1?refresh
|
||||
{
|
||||
"query" : {
|
||||
"match" : {
|
||||
"message" : "bonsai tree"
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": "bonsai tree"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -61,14 +61,14 @@ Match a document to the registered percolator queries:
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"percolate" : {
|
||||
"field" : "query",
|
||||
"document" : {
|
||||
"message" : "A new bonsai tree in the office"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"document": {
|
||||
"message": "A new bonsai tree in the office"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -159,18 +159,18 @@ the percolator query in a `bool` query's filter clause or in a `constant_score`
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"constant_score": {
|
||||
"filter": {
|
||||
"percolate" : {
|
||||
"field" : "query",
|
||||
"document" : {
|
||||
"message" : "A new bonsai tree in the office"
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"constant_score": {
|
||||
"filter": {
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"document": {
|
||||
"message": "A new bonsai tree in the office"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -199,25 +199,25 @@ correlate with the slot in the `documents` array specified in the `percolate` qu
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"percolate" : {
|
||||
"field" : "query",
|
||||
"documents" : [ <1>
|
||||
{
|
||||
"message" : "bonsai tree"
|
||||
},
|
||||
{
|
||||
"message" : "new tree"
|
||||
},
|
||||
{
|
||||
"message" : "the office"
|
||||
},
|
||||
{
|
||||
"message" : "office tree"
|
||||
}
|
||||
]
|
||||
"query": {
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"documents": [ <1>
|
||||
{
|
||||
"message": "bonsai tree"
|
||||
},
|
||||
{
|
||||
"message": "new tree"
|
||||
},
|
||||
{
|
||||
"message": "the office"
|
||||
},
|
||||
{
|
||||
"message": "office tree"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -315,14 +315,14 @@ Percolating an existing document, using the index response as basis to build to
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"percolate" : {
|
||||
"field": "query",
|
||||
"index" : "my-index",
|
||||
"id" : "2",
|
||||
"version" : 1 <1>
|
||||
}
|
||||
"query": {
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"index": "my-index",
|
||||
"id": "2",
|
||||
"version": 1 <1>
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -351,11 +351,11 @@ Save a query:
|
|||
--------------------------------------------------
|
||||
PUT /my-index/_doc/3?refresh
|
||||
{
|
||||
"query" : {
|
||||
"match" : {
|
||||
"message" : "brown fox"
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": "brown fox"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -366,11 +366,11 @@ Save another query:
|
|||
--------------------------------------------------
|
||||
PUT /my-index/_doc/4?refresh
|
||||
{
|
||||
"query" : {
|
||||
"match" : {
|
||||
"message" : "lazy dog"
|
||||
}
|
||||
"query": {
|
||||
"match": {
|
||||
"message": "lazy dog"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -381,19 +381,19 @@ Execute a search request with the `percolate` query and highlighting enabled:
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"percolate" : {
|
||||
"field": "query",
|
||||
"document" : {
|
||||
"message" : "The quick brown fox jumps over the lazy dog"
|
||||
}
|
||||
}
|
||||
},
|
||||
"highlight": {
|
||||
"fields": {
|
||||
"message": {}
|
||||
"query": {
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"document": {
|
||||
"message": "The quick brown fox jumps over the lazy dog"
|
||||
}
|
||||
}
|
||||
},
|
||||
"highlight": {
|
||||
"fields": {
|
||||
"message": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -477,30 +477,30 @@ When percolating multiple documents at the same time like the request below then
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"percolate" : {
|
||||
"field": "query",
|
||||
"documents" : [
|
||||
{
|
||||
"message" : "bonsai tree"
|
||||
},
|
||||
{
|
||||
"message" : "new tree"
|
||||
},
|
||||
{
|
||||
"message" : "the office"
|
||||
},
|
||||
{
|
||||
"message" : "office tree"
|
||||
}
|
||||
]
|
||||
"query": {
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"documents": [
|
||||
{
|
||||
"message": "bonsai tree"
|
||||
},
|
||||
{
|
||||
"message": "new tree"
|
||||
},
|
||||
{
|
||||
"message": "the office"
|
||||
},
|
||||
{
|
||||
"message": "office tree"
|
||||
}
|
||||
},
|
||||
"highlight": {
|
||||
"fields": {
|
||||
"message": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"highlight": {
|
||||
"fields": {
|
||||
"message": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
@@ -570,30 +570,30 @@ It is possible to specify multiple `percolate` queries in a single search reques
|
|||
--------------------------------------------------
|
||||
GET /my-index/_search
|
||||
{
|
||||
"query" : {
|
||||
"bool" : {
|
||||
"should" : [
|
||||
{
|
||||
"percolate" : {
|
||||
"field" : "query",
|
||||
"document" : {
|
||||
"message" : "bonsai tree"
|
||||
},
|
||||
"name": "query1" <1>
|
||||
}
|
||||
},
|
||||
{
|
||||
"percolate" : {
|
||||
"field" : "query",
|
||||
"document" : {
|
||||
"message" : "tulip flower"
|
||||
},
|
||||
"name": "query2" <1>
|
||||
}
|
||||
}
|
||||
]
|
||||
"query": {
|
||||
"bool": {
|
||||
"should": [
|
||||
{
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"document": {
|
||||
"message": "bonsai tree"
|
||||
},
|
||||
"name": "query1" <1>
|
||||
}
|
||||
},
|
||||
{
|
||||
"percolate": {
|
||||
"field": "query",
|
||||
"document": {
|
||||
"message": "tulip flower"
|
||||
},
|
||||
"name": "query2" <1>
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[continued]
|
||||
|
|
|
@@ -14,16 +14,16 @@ the <<mapping-id-field,`_id`>> field.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"pinned" : {
|
||||
"ids" : ["1", "4", "100"],
|
||||
"organic" : {
|
||||
"match":{
|
||||
"description": "brown shoes"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"pinned": {
|
||||
"ids": [ "1", "4", "100" ],
|
||||
"organic": {
|
||||
"match": {
|
||||
"description": "iphone"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -16,13 +16,13 @@ that begins with `ki`.
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"prefix": {
|
||||
"user": {
|
||||
"value": "ki"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"prefix": {
|
||||
"user": {
|
||||
"value": "ki"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -53,9 +53,9 @@ You can simplify the `prefix` query syntax by combining the `<field>` and
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"prefix" : { "user" : "ki" }
|
||||
}
|
||||
"query": {
|
||||
"prefix" : { "user" : "ki" }
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
|
|
@@ -42,12 +42,12 @@ whitespace as an operator, `new york city` is passed as-is to the analyzer.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"query" : "(new york city) OR (big apple)",
|
||||
"default_field" : "content"
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"query": "(new york city) OR (big apple)",
|
||||
"default_field": "content"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -265,12 +265,12 @@ For example, the following query
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["content", "name"],
|
||||
"query" : "this AND that"
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [ "content", "name" ],
|
||||
"query": "this AND that"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -281,11 +281,11 @@ matches the same words as
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"query": "(content:this OR name:this) AND (content:that OR name:that)"
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"query": "(content:this OR name:this) AND (content:that OR name:that)"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -297,13 +297,13 @@ For example (the `name` is boosted by 5 using `^5` notation):
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["content", "name^5"],
|
||||
"query" : "this AND that OR thus",
|
||||
"tie_breaker" : 0
|
||||
}
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["content", "name^5"],
|
||||
"query" : "this AND that OR thus",
|
||||
"tie_breaker" : 0
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -316,12 +316,12 @@ search on all "city" fields:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["city.*"],
|
||||
"query" : "this AND that OR thus"
|
||||
}
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["city.*"],
|
||||
"query" : "this AND that OR thus"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -333,11 +333,11 @@ string itself (properly escaping the `*` sign), for example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"query" : "city.\\*:(this AND that OR thus)"
|
||||
}
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"query" : "city.\\*:(this AND that OR thus)"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -352,12 +352,12 @@ introduced fields included). For example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["content", "name.*^5"],
|
||||
"query" : "this AND that OR thus"
|
||||
}
|
||||
"query": {
|
||||
"query_string" : {
|
||||
"fields" : ["content", "name.*^5"],
|
||||
"query" : "this AND that OR thus"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -446,15 +446,15 @@ many "should" clauses in the resulting query should match.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title"
|
||||
],
|
||||
"query": "this that thus",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title"
|
||||
],
|
||||
"query": "this that thus",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -472,16 +472,16 @@ in the single field `title`.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title",
|
||||
"content"
|
||||
],
|
||||
"query": "this that thus",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title",
|
||||
"content"
|
||||
],
|
||||
"query": "this that thus",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -496,16 +496,16 @@ that matches documents with the disjunction max over the fields `title` and
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title",
|
||||
"content"
|
||||
],
|
||||
"query": "this OR that OR thus",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title",
|
||||
"content"
|
||||
],
|
||||
"query": "this OR that OR thus",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -528,17 +528,17 @@ analyzer are grouped together when the input is analyzed.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title",
|
||||
"content"
|
||||
],
|
||||
"query": "this OR that OR thus",
|
||||
"type": "cross_fields",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
"query": {
|
||||
"query_string": {
|
||||
"fields": [
|
||||
"title",
|
||||
"content"
|
||||
],
|
||||
"query": "this OR that OR thus",
|
||||
"type": "cross_fields",
|
||||
"minimum_should_match": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -16,15 +16,15 @@ between `10` and `20`.
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"range" : {
|
||||
"age" : {
|
||||
"gte" : 10,
|
||||
"lte" : 20,
|
||||
"boost" : 2.0
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"range": {
|
||||
"age": {
|
||||
"gte": 10,
|
||||
"lte": 20,
|
||||
"boost": 2.0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -157,14 +157,14 @@ contains a date between today and yesterday.
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"range" : {
|
||||
"timestamp" : {
|
||||
"gte" : "now-1d/d",
|
||||
"lt" : "now/d"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"range": {
|
||||
"timestamp": {
|
||||
"gte": "now-1d/d",
|
||||
"lt": "now/d"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
|
|
@@ -23,16 +23,16 @@ terms can include `ky`, `kay`, and `kimchy`.
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"regexp": {
|
||||
"user": {
|
||||
"value": "k.*y",
|
||||
"flags" : "ALL",
|
||||
"max_determinized_states": 10000,
|
||||
"rewrite": "constant_score"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"regexp": {
|
||||
"user": {
|
||||
"value": "k.*y",
|
||||
"flags": "ALL",
|
||||
"max_determinized_states": 10000,
|
||||
"rewrite": "constant_score"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
|
|
@@ -18,18 +18,18 @@ WARNING: Using scripts can result in slower search speeds. See
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"filter" : {
|
||||
"script" : {
|
||||
"script" : {
|
||||
"source": "doc['num1'].value > 1",
|
||||
"lang": "painless"
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"filter": {
|
||||
"script": {
|
||||
"script": {
|
||||
"source": "doc['num1'].value > 1",
|
||||
"lang": "painless"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
@@ -55,21 +55,21 @@ in the script's `params` parameter. For example:
|
|||
----
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"bool" : {
|
||||
"filter" : {
|
||||
"script" : {
|
||||
"script" : {
|
||||
"source" : "doc['num1'].value > params.param1",
|
||||
"lang" : "painless",
|
||||
"params" : {
|
||||
"param1" : 5
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"bool": {
|
||||
"filter": {
|
||||
"script": {
|
||||
"script": {
|
||||
"source": "doc['num1'].value > params.param1",
|
||||
"lang": "painless",
|
||||
"params": {
|
||||
"param1": 5
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
|
|
@@ -18,16 +18,16 @@ The following `script_score` query assigns each returned document a score equal
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query" : {
|
||||
"script_score" : {
|
||||
"query" : {
|
||||
"match": { "message": "elasticsearch" }
|
||||
},
|
||||
"script" : {
|
||||
"source" : "doc['likes'].value / 10 "
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"script_score": {
|
||||
"query": {
|
||||
"match": { "message": "elasticsearch" }
|
||||
},
|
||||
"script": {
|
||||
"source": "doc['likes'].value / 10 "
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -345,23 +345,23 @@ Using an <<search-explain, explain request>> provides an explanation of how the
|
|||
--------------------------------------------------
|
||||
GET /twitter/_explain/0
|
||||
{
|
||||
"query" : {
|
||||
"script_score" : {
|
||||
"query" : {
|
||||
"match": { "message": "elasticsearch" }
|
||||
},
|
||||
"script" : {
|
||||
"source" : """
|
||||
long likes = doc['likes'].value;
|
||||
double normalizedLikes = likes / 10;
|
||||
if (explanation != null) {
|
||||
explanation.set('normalized likes = likes / 10 = ' + likes + ' / 10 = ' + normalizedLikes);
|
||||
}
|
||||
return normalizedLikes;
|
||||
"""
|
||||
}
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"script_score": {
|
||||
"query": {
|
||||
"match": { "message": "elasticsearch" }
|
||||
},
|
||||
"script": {
|
||||
"source": """
|
||||
long likes = doc['likes'].value;
|
||||
double normalizedLikes = likes / 10;
|
||||
if (explanation != null) {
|
||||
explanation.set('normalized likes = likes / 10 = ' + likes + ' / 10 = ' + normalizedLikes);
|
||||
}
|
||||
return normalizedLikes;
|
||||
"""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TEST[setup:twitter]
|
||||
|
|
|
@@ -28,22 +28,22 @@ Given the following index:
|
|||
--------------------------------------------------
|
||||
PUT /example
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"geometry": {
|
||||
"type": "shape"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"geometry": {
|
||||
"type": "shape"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /example/_doc/1?refresh=wait_for
|
||||
{
|
||||
"name": "Lucky Landing",
|
||||
"geometry": {
|
||||
"type": "point",
|
||||
"coordinates": [1355.400544, 5255.530286]
|
||||
}
|
||||
"name": "Lucky Landing",
|
||||
"geometry": {
|
||||
"type": "point",
|
||||
"coordinates": [ 1355.400544, 5255.530286 ]
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTSETUP
|
||||
|
@@ -55,17 +55,17 @@ The following query will find the point using the Elasticsearch's
|
|||
--------------------------------------------------
|
||||
GET /example/_search
|
||||
{
|
||||
"query":{
|
||||
"query": {
|
||||
"shape": {
|
||||
"geometry": {
|
||||
"shape": {
|
||||
"geometry": {
|
||||
"shape": {
|
||||
"type": "envelope",
|
||||
"coordinates" : [[1355.0, 5355.0], [1400.0, 5200.0]]
|
||||
},
|
||||
"relation": "within"
|
||||
}
|
||||
}
|
||||
"type": "envelope",
|
||||
"coordinates": [ [ 1355.0, 5355.0 ], [ 1400.0, 5200.0 ] ]
|
||||
},
|
||||
"relation": "within"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -134,36 +134,36 @@ shape:
|
|||
--------------------------------------------------
|
||||
PUT /shapes
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"geometry": {
|
||||
"type": "shape"
|
||||
}
|
||||
}
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"geometry": {
|
||||
"type": "shape"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT /shapes/_doc/footprint
|
||||
{
|
||||
"geometry": {
|
||||
"type": "envelope",
|
||||
"coordinates" : [[1355.0, 5355.0], [1400.0, 5200.0]]
|
||||
}
|
||||
"geometry": {
|
||||
"type": "envelope",
|
||||
"coordinates": [ [ 1355.0, 5355.0 ], [ 1400.0, 5200.0 ] ]
|
||||
}
|
||||
}
|
||||
|
||||
GET /example/_search
|
||||
{
|
||||
"query": {
|
||||
"shape": {
|
||||
"geometry": {
|
||||
"indexed_shape": {
|
||||
"index": "shapes",
|
||||
"id": "footprint",
|
||||
"path": "geometry"
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"shape": {
|
||||
"geometry": {
|
||||
"indexed_shape": {
|
||||
"index": "shapes",
|
||||
"id": "footprint",
|
||||
"path": "geometry"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -156,12 +156,12 @@ value. For example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"simple_query_string" : {
|
||||
"fields" : ["content"],
|
||||
"query" : "foo bar -baz"
|
||||
}
|
||||
"query": {
|
||||
"simple_query_string": {
|
||||
"fields": [ "content" ],
|
||||
"query": "foo bar -baz"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -184,12 +184,12 @@ and `PREFIX`.
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"simple_query_string" : {
|
||||
"query" : "foo | bar + baz*",
|
||||
"flags" : "OR|AND|PREFIX"
|
||||
}
|
||||
"query": {
|
||||
"simple_query_string": {
|
||||
"query": "foo | bar + baz*",
|
||||
"flags": "OR|AND|PREFIX"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -11,23 +11,23 @@ query maps to Lucene `SpanContainingQuery`. Here is an example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"span_containing" : {
|
||||
"little" : {
|
||||
"span_term" : { "field1" : "foo" }
|
||||
},
|
||||
"big" : {
|
||||
"span_near" : {
|
||||
"clauses" : [
|
||||
{ "span_term" : { "field1" : "bar" } },
|
||||
{ "span_term" : { "field1" : "baz" } }
|
||||
],
|
||||
"slop" : 5,
|
||||
"in_order" : true
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"span_containing": {
|
||||
"little": {
|
||||
"span_term": { "field1": "foo" }
|
||||
},
|
||||
"big": {
|
||||
"span_near": {
|
||||
"clauses": [
|
||||
{ "span_term": { "field1": "bar" } },
|
||||
{ "span_term": { "field1": "baz" } }
|
||||
],
|
||||
"slop": 5,
|
||||
"in_order": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -11,14 +11,14 @@ to Lucene `SpanFirstQuery`. Here is an example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"span_first" : {
|
||||
"match" : {
|
||||
"span_term" : { "user" : "kimchy" }
|
||||
},
|
||||
"end" : 3
|
||||
}
|
||||
"query": {
|
||||
"span_first": {
|
||||
"match": {
|
||||
"span_term": { "user": "kimchy" }
|
||||
},
|
||||
"end": 3
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@@ -12,13 +12,13 @@ it can be nested. Example:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"span_multi":{
|
||||
"match":{
|
||||
"prefix" : { "user" : { "value" : "ki" } }
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"span_multi": {
|
||||
"match": {
|
||||
"prefix": { "user": { "value": "ki" } }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
@@ -28,13 +28,13 @@ A boost can also be associated with the query:
|
|||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"span_multi":{
|
||||
"match":{
|
||||
"prefix" : { "user" : { "value" : "ki", "boost" : 1.08 } }
|
||||
}
|
||||
}
|
||||
"query": {
|
||||
"span_multi": {
|
||||
"match": {
|
||||
"prefix": { "user": { "value": "ki", "boost": 1.08 } }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||