[DOCS] Reformat snippets to use two-space indents (#59973) (#59994)

James Rodewig 2020-07-21 15:49:58 -04:00 committed by GitHub
parent 606b7ea139
commit b302b09b85
160 changed files with 5055 additions and 5054 deletions


@ -45,23 +45,23 @@ Let's try and index some twitter like information. First, let's index some tweet
----
curl -XPUT 'http://localhost:9200/twitter/_doc/1?pretty' -H 'Content-Type: application/json' -d '
{
  "user": "kimchy",
  "post_date": "2009-11-15T13:12:00",
  "message": "Trying out Elasticsearch, so far so good?"
}'
curl -XPUT 'http://localhost:9200/twitter/_doc/2?pretty' -H 'Content-Type: application/json' -d '
{
  "user": "kimchy",
  "post_date": "2009-11-15T14:12:12",
  "message": "Another tweet, will it be indexed?"
}'
curl -XPUT 'http://localhost:9200/twitter/_doc/3?pretty' -H 'Content-Type: application/json' -d '
{
  "user": "elastic",
  "post_date": "2010-01-15T01:46:38",
  "message": "Building the site, should be kewl"
}'
----
@ -87,9 +87,9 @@ We can also use the JSON query language Elasticsearch provides instead of a quer
----
curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
  "query" : {
    "match" : { "user": "kimchy" }
  }
}'
----
@ -98,9 +98,9 @@ Just for kicks, let's get all the documents stored (we should see the tweet from
----
curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
  "query" : {
    "match_all" : {}
  }
}'
----
@ -109,11 +109,11 @@ We can also do range search (the `post_date` was automatically identified as dat
----
curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
  "query" : {
    "range" : {
      "post_date" : { "from" : "2009-11-15T13:00:00", "to" : "2009-11-15T14:00:00" }
    }
  }
}'
----
@ -130,16 +130,16 @@ Another way to define our simple twitter system is to have a different index per
----
curl -XPUT 'http://localhost:9200/kimchy/_doc/1?pretty' -H 'Content-Type: application/json' -d '
{
  "user": "kimchy",
  "post_date": "2009-11-15T13:12:00",
  "message": "Trying out Elasticsearch, so far so good?"
}'
curl -XPUT 'http://localhost:9200/kimchy/_doc/2?pretty' -H 'Content-Type: application/json' -d '
{
  "user": "kimchy",
  "post_date": "2009-11-15T14:12:12",
  "message": "Another tweet, will it be indexed?"
}'
----
@ -150,10 +150,10 @@ Complete control on the index level is allowed. As an example, in the above case
----
curl -XPUT http://localhost:9200/another_user?pretty -H 'Content-Type: application/json' -d '
{
  "settings" : {
    "index.number_of_shards" : 2,
    "index.number_of_replicas" : 1
  }
}'
----
@ -163,9 +163,9 @@ index (twitter user), for example:
----
curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
  "query" : {
    "match_all" : {}
  }
}'
----
@ -174,9 +174,9 @@ Or on all the indices:
----
curl -XGET 'http://localhost:9200/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
  "query" : {
    "match_all" : {}
  }
}'
----


@ -145,16 +145,16 @@ The following <<indices-create-index,create index API>> request uses the
----
PUT sample_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_sample_analyzer": {
          "tokenizer": "standard",
          "filter": [ "sample" ]
        }
      }
    }
  }
}
----
// TEST[skip: REMOVE THIS COMMENT.]
@ -212,22 +212,22 @@ For example, the following request creates a custom `sample` filter with
----
PUT sample_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_custom_analyzer": {
          "tokenizer": "whitespace",
          "filter": [ "my_custom_sample_token_filter" ]
        }
      },
      "filter": {
        "my_custom_sample_token_filter": {
          "type": "sample",
          "foo": true
        }
      }
    }
  }
}
----
// TEST[skip: REMOVE THIS COMMENT.]


@ -77,15 +77,15 @@ apostrophe token filter to configure a new
--------------------------------------------------
PUT /apostrophe_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "standard_apostrophe": {
          "tokenizer": "standard",
          "filter": [ "apostrophe" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -83,16 +83,16 @@ The following <<indices-create-index,create index API>> request uses the
--------------------------------------------------
PUT /asciifold_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "standard_asciifolding": {
          "tokenizer": "standard",
          "filter": [ "asciifolding" ]
        }
      }
    }
  }
}
--------------------------------------------------
@ -118,21 +118,21 @@ For example, the following request creates a custom `asciifolding` filter with
--------------------------------------------------
PUT /asciifold_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "standard_asciifolding": {
          "tokenizer": "standard",
          "filter": [ "my_ascii_folding" ]
        }
      },
      "filter": {
        "my_ascii_folding": {
          "type": "asciifolding",
          "preserve_original": true
        }
      }
    }
  }
}
--------------------------------------------------


@ -126,16 +126,16 @@ CJK bigram token filter to configure a new
--------------------------------------------------
PUT /cjk_bigram_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "standard_cjk_bigram": {
          "tokenizer": "standard",
          "filter": [ "cjk_bigram" ]
        }
      }
    }
  }
}
--------------------------------------------------
@ -176,26 +176,26 @@ parameters.
--------------------------------------------------
PUT /cjk_bigram_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "han_bigrams": {
          "tokenizer": "standard",
          "filter": [ "han_bigrams_filter" ]
        }
      },
      "filter": {
        "han_bigrams_filter": {
          "type": "cjk_bigram",
          "ignored_scripts": [
            "hangul",
            "hiragana",
            "katakana"
          ],
          "output_unigrams": true
        }
      }
    }
  }
}
--------------------------------------------------


@ -69,15 +69,15 @@ CJK width token filter to configure a new
--------------------------------------------------
PUT /cjk_width_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "standard_cjk_width": {
          "tokenizer": "standard",
          "filter": [ "cjk_width" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -132,16 +132,16 @@ classic token filter to configure a new
--------------------------------------------------
PUT /classic_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "classic_analyzer": {
          "tokenizer": "classic",
          "filter": [ "classic" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -126,22 +126,22 @@ The following <<indices-create-index,create index API>> request uses the
--------------------------------------------------
PUT /common_grams_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "index_grams": {
          "tokenizer": "whitespace",
          "filter": [ "common_grams" ]
        }
      },
      "filter": {
        "common_grams": {
          "type": "common_grams",
          "common_words": [ "a", "is", "the" ]
        }
      }
    }
  }
}
--------------------------------------------------
@ -206,23 +206,23 @@ For example, the following request creates a custom `common_grams` filter with
--------------------------------------------------
PUT /common_grams_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "index_grams": {
          "tokenizer": "whitespace",
          "filter": [ "common_grams_query" ]
        }
      },
      "filter": {
        "common_grams_query": {
          "type": "common_grams",
          "common_words": [ "a", "is", "the" ],
          "ignore_case": true,
          "query_mode": true
        }
      }
    }
  }
}
--------------------------------------------------


@ -75,15 +75,15 @@ The following <<indices-create-index,create index API>> request uses the
--------------------------------------------------
PUT /decimal_digit_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "whitespace_decimal_digit": {
          "tokenizer": "whitespace",
          "filter": [ "decimal_digit" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -96,16 +96,16 @@ The following <<indices-create-index,create index API>> request uses the
--------------------------------------------------
PUT /elision_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "whitespace_elision": {
          "tokenizer": "whitespace",
          "filter": [ "elision" ]
        }
      }
    }
  }
}
--------------------------------------------------
@ -165,22 +165,22 @@ and `j'` elisions:
--------------------------------------------------
PUT /elision_case_sensitive_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "default": {
          "tokenizer": "whitespace",
          "filter": [ "elision_case_sensitive" ]
        }
      },
      "filter": {
        "elision_case_sensitive": {
          "type": "elision",
          "articles": [ "l", "m", "t", "qu", "n", "s", "j" ],
          "articles_case": true
        }
      }
    }
  }
}
--------------------------------------------------


@ -82,16 +82,16 @@ The following <<indices-create-index,create index API>> request uses the
--------------------------------------------------
PUT lowercase_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "whitespace_lowercase": {
          "tokenizer": "whitespace",
          "filter": [ "lowercase" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -36,22 +36,22 @@ You can set it up like:
--------------------------------------------------
PUT /multiplexer_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "standard",
          "filter": [ "my_multiplexer" ]
        }
      },
      "filter": {
        "my_multiplexer": {
          "type": "multiplexer",
          "filters": [ "lowercase", "lowercase, porter_stem" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -17,21 +17,21 @@ For example:
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "standard",
          "filter": [ "lowercase", "my_snow" ]
        }
      },
      "filter": {
        "my_snow": {
          "type": "snowball",
          "language": "Lovins"
        }
      }
    }
  }
}
--------------------------------------------------


@ -25,22 +25,22 @@ Here is an example:
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "standard",
          "filter": [ "lowercase", "custom_stems", "porter_stem" ]
        }
      },
      "filter": {
        "custom_stems": {
          "type": "stemmer_override",
          "rules_path": "analysis/stemmer_override.txt"
        }
      }
    }
  }
}
--------------------------------------------------
@ -57,24 +57,24 @@ You can also define the overrides rules inline:
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "standard",
          "filter": [ "lowercase", "custom_stems", "porter_stem" ]
        }
      },
      "filter": {
        "custom_stems": {
          "type": "stemmer_override",
          "rules": [
            "running, runs => run",
            "stemmer => stemmer"
          ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -26,24 +26,24 @@ Here is an example:
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "analyzer": {
          "search_synonyms": {
            "tokenizer": "whitespace",
            "filter": [ "graph_synonyms" ]
          }
        },
        "filter": {
          "graph_synonyms": {
            "type": "synonym_graph",
            "synonyms_path": "analysis/synonym.txt"
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -61,29 +61,29 @@ to note that only those synonym rules which cannot get parsed are ignored. For i
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "analyzer": {
          "synonym": {
            "tokenizer": "standard",
            "filter": [ "my_stop", "synonym_graph" ]
          }
        },
        "filter": {
          "my_stop": {
            "type": "stop",
            "stopwords": [ "bar" ]
          },
          "synonym_graph": {
            "type": "synonym_graph",
            "lenient": true,
            "synonyms": [ "foo, bar => baz" ]
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -121,21 +121,21 @@ configuration file (note use of `synonyms` instead of `synonyms_path`):
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "filter": {
          "synonym": {
            "type": "synonym_graph",
            "synonyms": [
              "lol, laughing out loud",
              "universe, cosmos"
            ]
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -152,23 +152,23 @@ declared using `format`:
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "filter": {
          "synonym": {
            "type": "synonym_graph",
            "format": "wordnet",
            "synonyms": [
              "s(100000001,1,'abstain',v,1,0).",
              "s(100000001,2,'refrain',v,1,0).",
              "s(100000001,3,'desist',v,1,0)."
            ]
          }
        }
      }
    }
  }
}
--------------------------------------------------


@ -12,24 +12,24 @@ Here is an example:
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "analyzer": {
          "synonym": {
            "tokenizer": "whitespace",
            "filter": [ "synonym" ]
          }
        },
        "filter": {
          "synonym": {
            "type": "synonym",
            "synonyms_path": "analysis/synonym.txt"
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -51,29 +51,29 @@ to note that only those synonym rules which cannot get parsed are ignored. For i
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "analyzer": {
          "synonym": {
            "tokenizer": "standard",
            "filter": [ "my_stop", "synonym" ]
          }
        },
        "filter": {
          "my_stop": {
            "type": "stop",
            "stopwords": [ "bar" ]
          },
          "synonym": {
            "type": "synonym",
            "lenient": true,
            "synonyms": [ "foo, bar => baz" ]
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -112,21 +112,21 @@ configuration file (note use of `synonyms` instead of `synonyms_path`):
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "filter": {
          "synonym": {
            "type": "synonym",
            "synonyms": [
              "i-pod, i pod => ipod",
              "universe, cosmos"
            ]
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -143,23 +143,23 @@ declared using `format`:
--------------------------------------------------
PUT /test_index
{
  "settings": {
    "index": {
      "analysis": {
        "filter": {
          "synonym": {
            "type": "synonym",
            "format": "wordnet",
            "synonyms": [
              "s(100000001,1,'abstain',v,1,0).",
              "s(100000001,2,'refrain',v,1,0).",
              "s(100000001,3,'desist',v,1,0)."
            ]
          }
        }
      }
    }
  }
}
--------------------------------------------------


@ -92,15 +92,15 @@ The following <<indices-create-index,create index API>> request uses the
--------------------------------------------------
PUT uppercase_example
{
  "settings": {
    "analysis": {
      "analyzer": {
        "whitespace_uppercase": {
          "tokenizer": "whitespace",
          "filter": [ "uppercase" ]
        }
      }
    }
  }
}
--------------------------------------------------


@ -145,26 +145,26 @@ If the follower index is `active`, the API returns the following results:
[source,console-result]
--------------------------------------------------
{
  "follower_indices": [
    {
      "follower_index": "follower_index",
      "remote_cluster": "remote_cluster",
      "leader_index": "leader_index",
      "status": "active",
      "parameters": {
        "max_read_request_operation_count": 5120,
        "max_read_request_size": "32mb",
        "max_outstanding_read_requests": 12,
        "max_write_request_operation_count": 5120,
        "max_write_request_size": "9223372036854775807b",
        "max_outstanding_write_requests": 9,
        "max_write_buffer_count": 2147483647,
        "max_write_buffer_size": "512mb",
        "max_retry_delay": "500ms",
        "read_poll_timeout": "1m"
      }
    }
  ]
}
--------------------------------------------------
@ -187,13 +187,13 @@ If the follower index is `paused`, the API returns the following results:
[source,console-result]
--------------------------------------------------
{
  "follower_indices": [
    {
      "follower_index": "follower_index",
      "remote_cluster": "remote_cluster",
      "leader_index": "leader_index",
      "status": "paused"
    }
  ]
}
--------------------------------------------------


@ -185,20 +185,20 @@ This is a short example of a simple reroute API call:
--------------------------------------------------
POST /_cluster/reroute
{
  "commands": [
    {
      "move": {
        "index": "test", "shard": 0,
        "from_node": "node1", "to_node": "node2"
      }
    },
    {
      "allocate_replica": {
        "index": "test", "shard": 1,
        "node": "node3"
      }
    }
  ]
}
--------------------------------------------------
// TEST[skip:doc tests run with only a single node]


@ -62,9 +62,9 @@ An example of a persistent update:
--------------------------------------------------
PUT /_cluster/settings
{
  "persistent" : {
    "indices.recovery.max_bytes_per_sec" : "50mb"
  }
}
--------------------------------------------------
@ -75,9 +75,9 @@ An example of a transient update:
--------------------------------------------------
PUT /_cluster/settings?flat_settings=true
{
  "transient" : {
    "indices.recovery.max_bytes_per_sec" : "20mb"
  }
}
--------------------------------------------------
@ -88,11 +88,11 @@ the transient example:
[source,console-result]
--------------------------------------------------
{
  ...
  "persistent" : { },
  "transient" : {
    "indices.recovery.max_bytes_per_sec" : "20mb"
  }
}
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./"acknowledged": true,/]
@ -104,9 +104,9 @@ This example resets a setting:
--------------------------------------------------
PUT /_cluster/settings
{
  "transient" : {
    "indices.recovery.max_bytes_per_sec" : null
  }
}
--------------------------------------------------
@ -116,9 +116,9 @@ The response does not include settings that have been reset:
[source,console-result]
--------------------------------------------------
{
  ...
  "persistent" : {},
  "transient" : {}
}
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./"acknowledged": true,/]
@ -131,8 +131,8 @@ all dynamic `indices.recovery` settings:
--------------------------------------------------
PUT /_cluster/settings
{
  "transient" : {
    "indices.recovery.*" : null
  }
}
--------------------------------------------------


@ -24,8 +24,8 @@ an initial sequence number and primary term:
--------------------------------------------------
PUT products/_doc/1567
{
  "product" : "r2d2",
  "details" : "A resourceful astromech droid"
}
--------------------------------------------------
@ -35,21 +35,22 @@ You can see the assigned sequence number and primary term in the
[source,console-result]
--------------------------------------------------
{
  "_shards": {
    "total": 2,
    "failed": 0,
    "successful": 1
  },
  "_index": "products",
  "_type": "_doc",
  "_id": "1567",
  "_version": 1,
  "_seq_no": 362,
  "_primary_term": 2,
  "result": "created"
}
--------------------------------------------------
// TESTRESPONSE[s/"_seq_no": 362/"_seq_no": $body._seq_no/]
// TESTRESPONSE[s/"_primary_term": 2/"_primary_term": $body._primary_term/]
Elasticsearch keeps track of the sequence number and primary term of the last
@ -68,20 +69,21 @@ returns:
[source,console-result]
--------------------------------------------------
{
  "_index": "products",
  "_type": "_doc",
  "_id": "1567",
  "_version": 1,
  "_seq_no": 362,
  "_primary_term": 2,
  "found": true,
  "_source": {
    "product": "r2d2",
    "details": "A resourceful astromech droid"
  }
}
--------------------------------------------------
// TESTRESPONSE[s/"_seq_no": 362/"_seq_no": $body._seq_no/]
// TESTRESPONSE[s/"_primary_term": 2/"_primary_term": $body._primary_term/]
Note: The <<search-search,Search API>> can return the `_seq_no` and `_primary_term`
@ -102,9 +104,9 @@ of another tag by another API:
--------------------------------------------------
PUT products/_doc/1567?if_seq_no=362&if_primary_term=2
{
  "product": "r2d2",
  "details": "A resourceful astromech droid",
  "tags": [ "droid" ]
}
--------------------------------------------------
// TEST[continued]


@ -65,7 +65,7 @@ Example to delete with routing
--------------------------------------------------
PUT /twitter/_doc/1?routing=kimchy
{
  "test": "test"
}
--------------------------------------------------
////
@ -179,20 +179,20 @@ The API returns the following result:
[source,console-result]
--------------------------------------------------
{
  "_shards": {
    "total": 2,
    "failed": 0,
    "successful": 2
  },
  "_index": "twitter",
  "_type": "_doc",
  "_id": "1",
  "_version": 2,
  "_primary_term": 1,
  "_seq_no": 5,
  "result": "deleted"
}
--------------------------------------------------
// TESTRESPONSE[s/"successful": 2/"successful": 1/]
// TESTRESPONSE[s/"_primary_term": 1/"_primary_term": $body._primary_term/]
// TESTRESPONSE[s/"_seq_no": 5/"_seq_no": $body._seq_no/]


@ -234,22 +234,22 @@ The API returns the following result:
[source,console-result]
--------------------------------------------------
{
  "_index": "twitter",
  "_type": "_doc",
  "_id": "0",
  "_version": 1,
  "_seq_no": 10,
  "_primary_term": 1,
  "found": true,
  "_source": {
    "user": "kimchy",
    "date": "2009-11-15T14:12:12",
    "likes": 0,
    "message": "trying out Elasticsearch"
  }
}
--------------------------------------------------
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no": $body._seq_no/ s/"_primary_term": 1/"_primary_term": $body._primary_term/]
Check to see if a document with the `_id` 0 exists:
@ -327,8 +327,8 @@ Now we can add a document:
--------------------------------------------------
PUT twitter/_doc/1
{
  "counter": 1,
  "tags": [ "red" ]
}
--------------------------------------------------
// TEST[continued]
@ -371,8 +371,8 @@ You can also retrieve metadata fields like the `_routing` field:
--------------------------------------------------
PUT twitter/_doc/2?routing=user1
{
  "counter" : 1,
  "tags" : ["white"]
}
--------------------------------------------------
// TEST[continued]


@ -190,23 +190,23 @@ creation of indices. It does not affect the creation of data streams.
--------------------------------------------------
PUT _cluster/settings
{
  "persistent": {
    "action.auto_create_index": "twitter,index10,-index1*,+ind*" <1>
  }
}

PUT _cluster/settings
{
  "persistent": {
    "action.auto_create_index": "false" <2>
  }
}

PUT _cluster/settings
{
  "persistent": {
    "action.auto_create_index": "true" <3>
  }
}
--------------------------------------------------
@ -239,9 +239,9 @@ the document.
--------------------------------------------------
POST twitter/_doc/
{
  "user" : "kimchy",
  "post_date" : "2009-11-15T14:12:12",
  "message" : "trying out Elasticsearch"
}
--------------------------------------------------
@ -250,21 +250,21 @@ The API returns the following result:
[source,console-result]
--------------------------------------------------
{
  "_shards": {
    "total": 2,
    "failed": 0,
    "successful": 2
  },
  "_index": "twitter",
  "_type": "_doc",
  "_id": "W0tpsmIBdwcYyG50zbta",
  "_version": 1,
  "_seq_no": 0,
  "_primary_term": 1,
  "result": "created"
}
--------------------------------------------------
// TESTRESPONSE[s/W0tpsmIBdwcYyG50zbta/$body._id/ s/"successful": 2/"successful": 1/]
[float]
[[optimistic-concurrency-control-index]]
@ -289,9 +289,9 @@ on a per-operation basis using the `routing` parameter. For example:
--------------------------------------------------
POST twitter/_doc?routing=kimchy
{
  "user" : "kimchy",
  "post_date" : "2009-11-15T14:12:12",
  "message" : "trying out Elasticsearch"
}
--------------------------------------------------
@ -366,11 +366,11 @@ replication succeeded/failed.
[source,js]
--------------------------------------------------
{
  "_shards": {
    "total": 2,
    "failed": 0,
    "successful": 2
  }
}
--------------------------------------------------
// NOTCONSOLE
@ -414,9 +414,9 @@ to 5 minutes:
--------------------------------------------------
PUT twitter/_doc/1?timeout=5m
{
  "user" : "kimchy",
  "post_date" : "2009-11-15T14:12:12",
  "message" : "trying out Elasticsearch"
}
--------------------------------------------------
@ -443,7 +443,7 @@ conflict will occur and the index operation will fail. For example:
--------------------------------------------------
PUT twitter/_doc/1?version=2&version_type=external
{
  "message" : "elasticsearch now has versioning support, double cool!"
}
--------------------------------------------------
// TEST[continued]
@ -501,9 +501,9 @@ Insert a JSON document into the `twitter` index with an `_id` of 1:
--------------------------------------------------
PUT twitter/_doc/1
{
  "user" : "kimchy",
  "post_date" : "2009-11-15T14:12:12",
  "message" : "trying out Elasticsearch"
}
--------------------------------------------------
@ -512,21 +512,21 @@ The API returns the following result:
[source,console-result]
--------------------------------------------------
{
  "_shards": {
    "total": 2,
    "failed": 0,
    "successful": 2
  },
  "_index": "twitter",
  "_type": "_doc",
  "_id": "1",
  "_version": 1,
  "_seq_no": 0,
  "_primary_term": 1,
  "result": "created"
}
--------------------------------------------------
// TESTRESPONSE[s/"successful": 2/"successful": 1/]
Use the `_create` resource to index a document into the `twitter` index if
no document with that ID exists:
@ -535,9 +535,9 @@ no document with that ID exists:
--------------------------------------------------
PUT twitter/_create/1
{
  "user" : "kimchy",
  "post_date" : "2009-11-15T14:12:12",
  "message" : "trying out Elasticsearch"
}
--------------------------------------------------
@ -548,8 +548,8 @@ index if no document with that ID exists:
--------------------------------------------------
PUT twitter/_doc/1?op_type=create
{
  "user" : "kimchy",
  "post_date" : "2009-11-15T14:12:12",
  "message" : "trying out Elasticsearch"
}
--------------------------------------------------


@ -10,16 +10,16 @@ Retrieves multiple JSON documents by ID.
--------------------------------------------------
GET /_mget
{
  "docs": [
    {
      "_index": "twitter",
      "_id": "1"
    },
    {
      "_index": "twitter",
      "_id": "2"
    }
  ]
}
--------------------------------------------------
// TEST[setup:twitter]
@ -130,16 +130,16 @@ If you specify an index in the request URI, only the document IDs are required i
--------------------------------------------------
GET /twitter/_mget
{
  "docs": [
    {
      "_type": "_doc",
      "_id": "1"
    },
    {
      "_type": "_doc",
      "_id": "2"
    }
  ]
}
--------------------------------------------------
@ -149,14 +149,14 @@ And type:
--------------------------------------------------
GET /test/_doc/_mget
{
  "docs": [
    {
      "_id": "1"
    },
    {
      "_id": "2"
    }
  ]
}
--------------------------------------------------
// TEST[setup:twitter]
@ -167,7 +167,7 @@ You can use the `ids` element to simplify the request:
--------------------------------------------------
GET /twitter/_mget
{
  "ids" : ["1", "2"]
}
--------------------------------------------------
// TEST[setup:twitter]
@ -189,29 +189,29 @@ from document 3 but filters out the `user.location` field.
--------------------------------------------------
GET /_mget
{
  "docs": [
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "1",
      "_source": false
    },
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "2",
      "_source": [ "field3", "field4" ]
    },
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "3",
      "_source": {
        "include": [ "user" ],
        "exclude": [ "user.location" ]
      }
    }
  ]
}
--------------------------------------------------
@ -230,20 +230,20 @@ For example, the following request retrieves `field1` and `field2` from document
--------------------------------------------------
GET /_mget
{
  "docs": [
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "1",
      "stored_fields": [ "field1", "field2" ]
    },
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "2",
      "stored_fields": [ "field3", "field4" ]
    }
  ]
}
--------------------------------------------------
@ -255,15 +255,15 @@ overridden to return `field3` and `field4` for document 2.
--------------------------------------------------
GET /test/_doc/_mget?stored_fields=field1,field2
{
  "docs": [
    {
      "_id": "1"
    },
    {
      "_id": "2",
      "stored_fields": [ "field3", "field4" ]
    }
  ]
}
--------------------------------------------------
@ -278,18 +278,18 @@ and fetches `test/_doc/1` from the shard corresponding to routing key `key2`.
--------------------------------------------------
GET /_mget?routing=key1
{
  "docs": [
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "1",
      "routing": "key2"
    },
    {
      "_index": "test",
      "_type": "_doc",
      "_id": "2"
    }
  ]
}
--------------------------------------------------


@ -114,13 +114,13 @@ following simplified syntax:
--------------------------------------------------
POST /twitter/_mtermvectors
{
  "ids": [ "1", "2" ],
  "parameters": {
    "fields": [
      "message"
    ],
    "term_statistics": true
  }
}
--------------------------------------------------
// TEST[setup:twitter]


@ -246,61 +246,61 @@ Response:
[source,console-result]
--------------------------------------------------
{
  "_id": "1",
  "_index": "twitter",
  "_type": "_doc",
  "_version": 1,
  "found": true,
  "took": 6,
  "term_vectors": {
    "text": {
      "field_statistics": {
        "doc_count": 2,
        "sum_doc_freq": 6,
        "sum_ttf": 8
      },
      "terms": {
        "test": {
          "doc_freq": 2,
          "term_freq": 3,
          "tokens": [
            {
              "end_offset": 12,
              "payload": "d29yZA==",
              "position": 1,
              "start_offset": 8
            },
            {
              "end_offset": 17,
              "payload": "d29yZA==",
              "position": 2,
              "start_offset": 13
            },
            {
              "end_offset": 22,
              "payload": "d29yZA==",
              "position": 3,
              "start_offset": 18
            }
          ],
          "ttf": 4
        },
        "twitter": {
          "doc_freq": 2,
          "term_freq": 1,
          "tokens": [
            {
              "end_offset": 7,
              "payload": "d29yZA==",
              "position": 0,
              "start_offset": 0
            }
          ],
          "ttf": 2
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -427,18 +427,18 @@ their tf-idf must be too low.
--------------------------------------------------
GET /imdb/_termvectors
{
  "doc": {
    "plot": "When wealthy industrialist Tony Stark is forced to build an armored suit after a life-threatening incident, he ultimately decides to use its technology to fight against evil."
  },
  "term_statistics": true,
  "field_statistics": true,
  "positions": false,
  "offsets": false,
  "filter": {
    "max_num_terms": 3,
    "min_term_freq": 1,
    "min_doc_freq": 1
  }
}
--------------------------------------------------
// TEST[skip:no imdb test index]


@ -86,8 +86,8 @@ First, let's index a simple doc:
--------------------------------------------------
PUT test/_doc/1
{
  "counter" : 1,
  "tags" : ["red"]
}
--------------------------------------------------
@ -98,13 +98,13 @@ following script:
--------------------------------------------------
POST test/_update/1
{
  "script" : {
    "source": "ctx._source.counter += params.count",
    "lang": "painless",
    "params" : {
      "count" : 4
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -116,13 +116,13 @@ Similarly, you could use and update script to add a tag to the list of tags
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "ctx._source.tags.add(params.tag)",
    "lang": "painless",
    "params": {
      "tag": "blue"
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -137,13 +137,13 @@ script just removes one occurrence.
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "if (ctx._source.tags.contains(params.tag)) { ctx._source.tags.remove(ctx._source.tags.indexOf(params.tag)) }",
    "lang": "painless",
    "params": {
      "tag": "blue"
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -155,7 +155,7 @@ adds the field `new_field`:
--------------------------------------------------
POST test/_update/1
{
  "script" : "ctx._source.new_field = 'value_of_new_field'"
}
--------------------------------------------------
// TEST[continued]
@ -166,7 +166,7 @@ Conversely, this script removes the field `new_field`:
--------------------------------------------------
POST test/_update/1
{
  "script" : "ctx._source.remove('new_field')"
}
--------------------------------------------------
// TEST[continued]
@ -179,13 +179,13 @@ the `tags` field contains `green`, otherwise it does nothing (`noop`):
--------------------------------------------------
POST test/_update/1
{
  "script": {
    "source": "if (ctx._source.tags.contains(params.tag)) { ctx.op = 'delete' } else { ctx.op = 'none' }",
    "lang": "painless",
    "params": {
      "tag": "green"
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -200,9 +200,9 @@ existing document:
-------------------------------------------------- --------------------------------------------------
POST test/_update/1 POST test/_update/1
{ {
"doc" : { "doc": {
"name" : "new_name" "name": "new_name"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -220,9 +220,9 @@ anything and return `"result": "noop"`:
-------------------------------------------------- --------------------------------------------------
POST test/_update/1 POST test/_update/1
{ {
"doc" : { "doc": {
"name" : "new_name" "name": "new_name"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -254,10 +254,10 @@ You can disable this behavior by setting `"detect_noop": false`:
-------------------------------------------------- --------------------------------------------------
POST test/_update/1 POST test/_update/1
{ {
"doc" : { "doc": {
"name" : "new_name" "name": "new_name"
}, },
"detect_noop": false "detect_noop": false
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -274,16 +274,16 @@ are inserted as a new document. If the document exists, the
-------------------------------------------------- --------------------------------------------------
POST test/_update/1 POST test/_update/1
{ {
"script" : { "script": {
"source": "ctx._source.counter += params.count", "source": "ctx._source.counter += params.count",
"lang": "painless", "lang": "painless",
"params" : { "params": {
"count" : 4 "count": 4
}
},
"upsert" : {
"counter" : 1
} }
},
"upsert": {
"counter": 1
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -299,18 +299,18 @@ To run the script whether or not the document exists, set `scripted_upsert` to
-------------------------------------------------- --------------------------------------------------
POST sessions/_update/dh3sgudg8gsrgl POST sessions/_update/dh3sgudg8gsrgl
{ {
"scripted_upsert":true, "scripted_upsert": true,
"script" : { "script": {
"id": "my_web_session_summariser", "id": "my_web_session_summariser",
"params" : { "params": {
"pageViewEvent" : { "pageViewEvent": {
"url":"foo.com/bar", "url": "foo.com/bar",
"response":404, "response": 404,
"time":"2014-01-01 12:32" "time": "2014-01-01 12:32"
} }
} }
}, },
"upsert" : {} "upsert": {}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/"id": "my_web_session_summariser"/"source": "ctx._source.page_view_event = params.pageViewEvent"/] // TEST[s/"id": "my_web_session_summariser"/"source": "ctx._source.page_view_event = params.pageViewEvent"/]
@ -328,10 +328,10 @@ value:
-------------------------------------------------- --------------------------------------------------
POST test/_update/1 POST test/_update/1
{ {
"doc" : { "doc": {
"name" : "new_name" "name": "new_name"
}, },
"doc_as_upsert" : true "doc_as_upsert": true
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]

View File

@ -333,17 +333,17 @@ To get some data into {es} that you can start searching and analyzing:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
{ {
"account_number": 0, "account_number": 0,
"balance": 16623, "balance": 16623,
"firstname": "Bradshaw", "firstname": "Bradshaw",
"lastname": "Mckenzie", "lastname": "Mckenzie",
"age": 29, "age": 29,
"gender": "F", "gender": "F",
"address": "244 Columbus Place", "address": "244 Columbus Place",
"employer": "Euron", "employer": "Euron",
"email": "bradshawmckenzie@euron.com", "email": "bradshawmckenzie@euron.com",
"city": "Hobucken", "city": "Hobucken",
"state": "CO" "state": "CO"
} }
-------------------------------------------------- --------------------------------------------------
// NOTCONSOLE // NOTCONSOLE
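Documents of this shape are normally loaded in one shot with the bulk API rather than indexed one at a time. A hedged sketch, assuming an index named `bank` as in the getting-started tutorial (the index name and `_id` are assumptions; the source line reuses the sample account shown above):

[source,console]
----
POST /bank/_bulk?refresh
{ "index": { "_id": "0" } }
{ "account_number": 0, "balance": 16623, "firstname": "Bradshaw", "lastname": "Mckenzie", "age": 29, "gender": "F", "address": "244 Columbus Place", "employer": "Euron", "email": "bradshawmckenzie@euron.com", "city": "Hobucken", "state": "CO" }
----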

View File

@ -201,23 +201,23 @@ An initial search typically begins with a query to identify strongly related ter
-------------------------------------------------- --------------------------------------------------
POST clicklogs/_graph/explore POST clicklogs/_graph/explore
{ {
"query": { <1> "query": { <1>
"match": { "match": {
"query.raw": "midi" "query.raw": "midi"
}
},
"vertices": [ <2>
{
"field": "product"
}
],
"connections": { <3>
"vertices": [
{
"field": "query.raw"
}
]
} }
},
"vertices": [ <2>
{
"field": "product"
}
],
"connections": { <3>
"vertices": [
{
"field": "query.raw"
}
]
}
} }
-------------------------------------------------- --------------------------------------------------
@ -306,51 +306,51 @@ every document could be of interest, see the
-------------------------------------------------- --------------------------------------------------
POST clicklogs/_graph/explore POST clicklogs/_graph/explore
{ {
"query": { "query": {
"match": { "match": {
"query.raw": "midi" "query.raw": "midi"
} }
},
"controls": {
"use_significance": false, <1>
"sample_size": 2000, <2>
"timeout": 2000, <3>
"sample_diversity": { <4>
"field": "category.raw",
"max_docs_per_value": 500
}
},
"vertices": [
{
"field": "product",
"size": 5, <5>
"min_doc_count": 10, <6>
"shard_min_doc_count": 3 <7>
}
],
"connections": {
"query": { <8>
"bool": {
"filter": [
{
"range": {
"query_time": {
"gte": "2015-10-01 00:00:00"
}
}
}
]
}
}, },
"controls": { "vertices": [
"use_significance": false,<1>
"sample_size": 2000,<2>
"timeout": 2000,<3>
"sample_diversity": {<4>
"field": "category.raw",
"max_docs_per_value": 500
}
},
"vertices": [
{ {
"field": "product", "field": "query.raw",
"size": 5,<5> "size": 5,
"min_doc_count": 10,<6> "min_doc_count": 10,
"shard_min_doc_count": 3<7> "shard_min_doc_count": 3
} }
], ]
"connections": { }
"query": {<8>
"bool": {
"filter": [
{
"range": {
"query_time": {
"gte": "2015-10-01 00:00:00"
}
}
}
]
}
},
"vertices": [
{
"field": "query.raw",
"size": 5,
"min_doc_count": 10,
"shard_min_doc_count": 3
}
]
}
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -104,16 +104,16 @@ look like this:
-------------------------------------------------- --------------------------------------------------
PUT index PUT index
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"body": { "body": {
"type": "text" "type": "text"
}, },
"pagerank": { "pagerank": {
"type": "long" "type": "long"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -123,16 +123,16 @@ PUT index
-------------------------------------------------- --------------------------------------------------
GET index/_search GET index/_search
{ {
"query" : { "query": {
"script_score" : { "script_score": {
"query" : { "query": {
"match": { "body": "elasticsearch" } "match": { "body": "elasticsearch" }
}, },
"script" : { "script": {
"source" : "_score * saturation(doc['pagerank'].value, 10)" <1> "source": "_score * saturation(doc['pagerank'].value, 10)" <1>
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
//TEST[continued] //TEST[continued]
@ -148,16 +148,16 @@ look like below:
-------------------------------------------------- --------------------------------------------------
PUT index PUT index
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"body": { "body": {
"type": "text" "type": "text"
}, },
"pagerank": { "pagerank": {
"type": "rank_feature" "type": "rank_feature"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST // TEST
@ -168,21 +168,21 @@ PUT index
-------------------------------------------------- --------------------------------------------------
GET _search GET _search
{ {
"query" : { "query": {
"bool" : { "bool": {
"must": { "must": {
"match": { "body": "elasticsearch" } "match": { "body": "elasticsearch" }
}, },
"should": { "should": {
"rank_feature": { "rank_feature": {
"field": "pagerank", <1> "field": "pagerank", <1>
"saturation": { "saturation": {
"pivot": 10 "pivot": 10
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -150,11 +150,11 @@ The API returns the following response:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"acknowledged" : true, "acknowledged" : true,
"shards_acknowledged" : true, "shards_acknowledged" : true,
"indices" : [ { "indices" : [ {
"name" : "my_index", "name" : "my_index",
"blocked" : true "blocked" : true
} ] } ]
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -16,19 +16,19 @@ For instance, the following example shows how to define a sort on a single field:
-------------------------------------------------- --------------------------------------------------
PUT twitter PUT twitter
{ {
"settings" : { "settings": {
"index" : { "index": {
"sort.field" : "date", <1> "sort.field": "date", <1>
"sort.order" : "desc" <2> "sort.order": "desc" <2>
}
},
"mappings": {
"properties": {
"date": {
"type": "date"
}
}
} }
},
"mappings": {
"properties": {
"date": {
"type": "date"
}
}
}
} }
-------------------------------------------------- --------------------------------------------------
@ -41,23 +41,23 @@ It is also possible to sort the index by more than one field:
-------------------------------------------------- --------------------------------------------------
PUT twitter PUT twitter
{ {
"settings" : { "settings": {
"index" : { "index": {
"sort.field" : ["username", "date"], <1> "sort.field": [ "username", "date" ], <1>
"sort.order" : ["asc", "desc"] <2> "sort.order": [ "asc", "desc" ] <2>
}
},
"mappings": {
"properties": {
"username": {
"type": "keyword",
"doc_values": true
},
"date": {
"type": "date"
}
}
} }
},
"mappings": {
"properties": {
"username": {
"type": "keyword",
"doc_values": true
},
"date": {
"type": "date"
}
}
}
} }
-------------------------------------------------- --------------------------------------------------
@ -114,19 +114,19 @@ For example, let's say we have an index that contains events sorted by a timesta
-------------------------------------------------- --------------------------------------------------
PUT events PUT events
{ {
"settings" : { "settings": {
"index" : { "index": {
"sort.field" : "timestamp", "sort.field": "timestamp",
"sort.order" : "desc" <1> "sort.order": "desc" <1>
}
},
"mappings": {
"properties": {
"timestamp": {
"type": "date"
}
}
} }
},
"mappings": {
"properties": {
"timestamp": {
"type": "date"
}
}
}
} }
-------------------------------------------------- --------------------------------------------------
@ -138,10 +138,10 @@ You can search for the last 10 events with:
-------------------------------------------------- --------------------------------------------------
GET /events/_search GET /events/_search
{ {
"size": 10, "size": 10,
"sort": [ "sort": [
{ "timestamp": "desc" } { "timestamp": "desc" }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -159,11 +159,11 @@ to false:
-------------------------------------------------- --------------------------------------------------
GET /events/_search GET /events/_search
{ {
"size": 10, "size": 10,
"sort": [ <1> "sort": [ <1>
{ "timestamp": "desc" } { "timestamp": "desc" }
], ],
"track_total_hits": false "track_total_hits": false
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]

View File

@ -33,15 +33,15 @@ All of the above settings are _dynamic_ and can be set for each index using the
-------------------------------------------------- --------------------------------------------------
PUT /twitter/_settings PUT /twitter/_settings
{ {
"index.search.slowlog.threshold.query.warn": "10s", "index.search.slowlog.threshold.query.warn": "10s",
"index.search.slowlog.threshold.query.info": "5s", "index.search.slowlog.threshold.query.info": "5s",
"index.search.slowlog.threshold.query.debug": "2s", "index.search.slowlog.threshold.query.debug": "2s",
"index.search.slowlog.threshold.query.trace": "500ms", "index.search.slowlog.threshold.query.trace": "500ms",
"index.search.slowlog.threshold.fetch.warn": "1s", "index.search.slowlog.threshold.fetch.warn": "1s",
"index.search.slowlog.threshold.fetch.info": "800ms", "index.search.slowlog.threshold.fetch.info": "800ms",
"index.search.slowlog.threshold.fetch.debug": "500ms", "index.search.slowlog.threshold.fetch.debug": "500ms",
"index.search.slowlog.threshold.fetch.trace": "200ms", "index.search.slowlog.threshold.fetch.trace": "200ms",
"index.search.slowlog.level": "info" "index.search.slowlog.level": "info"
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -143,12 +143,12 @@ All of the above settings are _dynamic_ and can be set for each index using the
-------------------------------------------------- --------------------------------------------------
PUT /twitter/_settings PUT /twitter/_settings
{ {
"index.indexing.slowlog.threshold.index.warn": "10s", "index.indexing.slowlog.threshold.index.warn": "10s",
"index.indexing.slowlog.threshold.index.info": "5s", "index.indexing.slowlog.threshold.index.info": "5s",
"index.indexing.slowlog.threshold.index.debug": "2s", "index.indexing.slowlog.threshold.index.debug": "2s",
"index.indexing.slowlog.threshold.index.trace": "500ms", "index.indexing.slowlog.threshold.index.trace": "500ms",
"index.indexing.slowlog.level": "info", "index.indexing.slowlog.level": "info",
"index.indexing.slowlog.source": "1000" "index.indexing.slowlog.source": "1000"
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]

View File

@ -85,11 +85,11 @@ with a mapping for the `user_id` field:
-------------------------------------------------- --------------------------------------------------
PUT /users PUT /users
{ {
"mappings" : { "mappings" : {
"properties" : { "properties" : {
"user_id" : {"type" : "integer"} "user_id" : {"type" : "integer"}
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -99,12 +99,12 @@ Then add the index alias for a specific user, `user_12`:
-------------------------------------------------- --------------------------------------------------
PUT /users/_alias/user_12 PUT /users/_alias/user_12
{ {
"routing" : "12", "routing" : "12",
"filter" : { "filter" : {
"term" : { "term" : {
"user_id" : 12 "user_id" : 12
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
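Once such an alias exists, requests sent to it behave as if the filter and routing had been supplied explicitly. A minimal sketch of searching through the alias (not part of the diff; it assumes the `users` index and `user_12` alias defined above):

[source,console]
----
GET /user_12/_search
{
  "query": {
    "match_all": {}
  }
}
----

The search is routed using value `12` and only returns documents matching the alias filter on `user_id`.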
@ -119,18 +119,18 @@ to add an index alias during index creation.
-------------------------------------------------- --------------------------------------------------
PUT /logs_20302801 PUT /logs_20302801
{ {
"mappings" : { "mappings": {
"properties" : { "properties": {
"year" : {"type" : "integer"} "year": { "type": "integer" }
}
},
"aliases" : {
"current_day" : {},
"2030" : {
"filter" : {
"term" : {"year" : 2030 }
}
}
} }
},
"aliases": {
"current_day": {},
"2030": {
"filter": {
"term": { "year": 2030 }
}
}
}
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -12,9 +12,9 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
---- ----
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "add" : { "index" : "twitter", "alias" : "alias1" } } { "add" : { "index" : "twitter", "alias" : "alias1" } }
] ]
} }
---- ----
// TEST[setup:twitter] // TEST[setup:twitter]
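To check which indices an alias currently points to, the get alias API can be used. A small sketch, not part of the diff, assuming the `alias1` alias added above:

[source,console]
----
GET /_alias/alias1
----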
@ -170,9 +170,9 @@ The following request adds the `alias1` alias to the `test1` index.
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "add" : { "index" : "test1", "alias" : "alias1" } } { "add" : { "index" : "test1", "alias" : "alias1" } }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test1\nPUT test2\n/] // TEST[s/^/PUT test1\nPUT test2\n/]
@ -186,9 +186,9 @@ The following request removes the `alias1` alias.
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "remove" : { "index" : "test1", "alias" : "alias1" } } { "remove" : { "index" : "test1", "alias" : "alias1" } }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -204,10 +204,10 @@ period of time where the alias does not point to an index:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "remove" : { "index" : "test1", "alias" : "alias1" } }, { "remove" : { "index" : "test1", "alias" : "alias1" } },
{ "add" : { "index" : "test1", "alias" : "alias2" } } { "add" : { "index" : "test1", "alias" : "alias2" } }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -222,10 +222,10 @@ actions:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "add" : { "index" : "test1", "alias" : "alias1" } }, { "add" : { "index" : "test1", "alias" : "alias1" } },
{ "add" : { "index" : "test2", "alias" : "alias1" } } { "add" : { "index" : "test2", "alias" : "alias1" } }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test1\nPUT test2\n/] // TEST[s/^/PUT test1\nPUT test2\n/]
@ -236,9 +236,9 @@ Multiple indices can be specified for an action with the `indices` array syntax:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } } { "add" : { "indices" : ["test1", "test2"], "alias" : "alias1" } }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test1\nPUT test2\n/] // TEST[s/^/PUT test1\nPUT test2\n/]
@ -253,9 +253,9 @@ more than one index that share a common name:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "add" : { "index" : "test*", "alias" : "all_test_indices" } } { "add" : { "index" : "test*", "alias" : "all_test_indices" } }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test1\nPUT test2\n/] // TEST[s/^/PUT test1\nPUT test2\n/]
@ -278,10 +278,10 @@ PUT test <1>
PUT test_2 <2> PUT test_2 <2>
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions" : [
{ "add": { "index": "test_2", "alias": "test" } }, { "add": { "index": "test_2", "alias": "test" } },
{ "remove_index": { "index": "test" } } <3> { "remove_index": { "index": "test" } } <3>
] ]
} }
-------------------------------------------------- --------------------------------------------------
@ -320,15 +320,15 @@ Now we can create an alias that uses a filter on field `user`:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions": [
{ {
"add" : { "add": {
"index" : "test1", "index": "test1",
"alias" : "alias2", "alias": "alias2",
"filter" : { "term" : { "user" : "kimchy" } } "filter": { "term": { "user": "kimchy" } }
} }
} }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -348,15 +348,15 @@ automatically modified to use value `1` for routing:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions": [
{ {
"add" : { "add": {
"index" : "test", "index": "test",
"alias" : "alias1", "alias": "alias1",
"routing" : "1" "routing": "1"
} }
} }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test\n/] // TEST[s/^/PUT test\n/]
@ -368,16 +368,16 @@ and indexing operations:
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions": [
{ {
"add" : { "add": {
"index" : "test", "index": "test",
"alias" : "alias2", "alias": "alias2",
"search_routing" : "1,2", "search_routing": "1,2",
"index_routing" : "2" "index_routing": "2"
} }
} }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test\n/] // TEST[s/^/PUT test\n/]
@ -415,21 +415,21 @@ Rollover (see <<indices-rollover-index, Rollover With Write Index>>).
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions": [
{ {
"add" : { "add": {
"index" : "test", "index": "test",
"alias" : "alias1", "alias": "alias1",
"is_write_index" : true "is_write_index": true
} }
}, },
{ {
"add" : { "add": {
"index" : "test2", "index": "test2",
"alias" : "alias1" "alias": "alias1"
} }
} }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test\nPUT test2\n/] // TEST[s/^/PUT test\nPUT test2\n/]
@ -441,7 +441,7 @@ In this example, we associate the alias `alias1` to both `test` and `test2`, whe
-------------------------------------------------- --------------------------------------------------
PUT /alias1/_doc/1 PUT /alias1/_doc/1
{ {
"foo": "bar" "foo": "bar"
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -462,21 +462,21 @@ do an atomic swap. The swap is not dependent on the ordering of the actions.
-------------------------------------------------- --------------------------------------------------
POST /_aliases POST /_aliases
{ {
"actions" : [ "actions": [
{ {
"add" : { "add": {
"index" : "test", "index": "test",
"alias" : "alias1", "alias": "alias1",
"is_write_index" : false "is_write_index": false
} }
}, { }, {
"add" : { "add": {
"index" : "test2", "index": "test2",
"alias" : "alias1", "alias": "alias1",
"is_write_index" : true "is_write_index": true
} }
} }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[s/^/PUT test\nPUT test2\n/] // TEST[s/^/PUT test\nPUT test2\n/]

View File

@ -102,34 +102,34 @@ NOTE: Using the following analyzer as an index analyzer results in an error.
-------------------------------------------------- --------------------------------------------------
PUT /my_index PUT /my_index
{ {
"settings": { "settings": {
"index" : { "index": {
"analysis" : { "analysis": {
"analyzer" : { "analyzer": {
"my_synonyms" : { "my_synonyms": {
"tokenizer" : "whitespace", "tokenizer": "whitespace",
"filter" : ["synonym"] "filter": [ "synonym" ]
} }
}, },
"filter" : { "filter": {
"synonym" : { "synonym": {
"type" : "synonym_graph", "type": "synonym_graph",
"synonyms_path" : "analysis/synonym.txt", <1> "synonyms_path": "analysis/synonym.txt", <1>
"updateable" : true <2> "updateable": true <2>
} }
}
}
}
},
"mappings": {
"properties": {
"text": {
"type": "text",
"analyzer" : "standard",
"search_analyzer": "my_synonyms" <3>
}
} }
}
} }
},
"mappings": {
"properties": {
"text": {
"type": "text",
"analyzer": "standard",
"search_analyzer": "my_synonyms" <3>
}
}
}
} }
-------------------------------------------------- --------------------------------------------------
@ -153,25 +153,25 @@ The API returns the following response.
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"_shards" : { "_shards": {
"total" : 2, "total": 2,
"successful" : 2, "successful": 2,
"failed" : 0 "failed": 0
}, },
"reload_details" : [ "reload_details": [
{ {
"index" : "my_index", "index": "my_index",
"reloaded_analyzers" : [ "reloaded_analyzers": [
"my_synonyms" "my_synonyms"
], ],
"reloaded_node_ids" : [ "reloaded_node_ids": [
"mfdqTXn_T7SGr2Ho2KT8uw" "mfdqTXn_T7SGr2Ho2KT8uw"
] ]
} }
] ]
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
// TESTRESPONSE[s/"total" : 2/"total" : $body._shards.total/] // TESTRESPONSE[s/"total": 2/"total": $body._shards.total/]
// TESTRESPONSE[s/"successful" : 2/"successful" : $body._shards.successful/] // TESTRESPONSE[s/"successful": 2/"successful": $body._shards.successful/]
// TESTRESPONSE[s/mfdqTXn_T7SGr2Ho2KT8uw/$body.reload_details.0.reloaded_node_ids.0/] // TESTRESPONSE[s/mfdqTXn_T7SGr2Ho2KT8uw/$body.reload_details.0.reloaded_node_ids.0/]

View File

@ -73,12 +73,12 @@ The API returns the following response:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"acknowledged" : true, "acknowledged": true,
"shards_acknowledged" : true, "shards_acknowledged": true,
"indices" : { "indices": {
"my_index" : { "my_index": {
"closed" : true "closed": true
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -83,12 +83,12 @@ associated with it, defined in the body:
-------------------------------------------------- --------------------------------------------------
PUT /twitter PUT /twitter
{ {
"settings" : { "settings": {
"index" : { "index": {
"number_of_shards" : 3, <1> "number_of_shards": 3, <1>
"number_of_replicas" : 2 <2> "number_of_replicas": 2 <2>
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -101,10 +101,10 @@ or more simplified
-------------------------------------------------- --------------------------------------------------
PUT /twitter PUT /twitter
{ {
"settings" : { "settings": {
"number_of_shards" : 3, "number_of_shards": 3,
"number_of_replicas" : 2 "number_of_replicas": 2
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -125,14 +125,14 @@ The create index API allows for providing a mapping definition:
-------------------------------------------------- --------------------------------------------------
PUT /test PUT /test
{ {
"settings" : { "settings": {
"number_of_shards" : 1 "number_of_shards": 1
}, },
"mappings" : { "mappings": {
"properties" : { "properties": {
"field1" : { "type" : "text" } "field1": { "type": "text" }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -149,15 +149,15 @@ The create index API also allows providing a set of <<indices-aliases,aliases>>
-------------------------------------------------- --------------------------------------------------
PUT /test PUT /test
{ {
"aliases" : { "aliases": {
"alias_1" : {}, "alias_1": {},
"alias_2" : { "alias_2": {
"filter" : { "filter": {
"term" : {"user" : "kimchy" } "term": { "user": "kimchy" }
}, },
"routing" : "kimchy" "routing": "kimchy"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -171,9 +171,9 @@ what happened:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"acknowledged": true, "acknowledged": true,
"shards_acknowledged": true, "shards_acknowledged": true,
"index": "test" "index": "test"
} }
-------------------------------------------------- --------------------------------------------------
@ -196,9 +196,9 @@ the `wait_for_active_shards` value on all subsequent write operations):
-------------------------------------------------- --------------------------------------------------
PUT /test PUT /test
{ {
"settings": { "settings": {
"index.write.wait_for_active_shards": "2" "index.write.wait_for_active_shards": "2"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[skip:requires two nodes] // TEST[skip:requires two nodes]

View File

@ -60,6 +60,6 @@ The API returns the following response:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"acknowledged" : true "acknowledged" : true
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -78,19 +78,19 @@ index with several field mappings.
-------------------------------------------------- --------------------------------------------------
PUT /publications PUT /publications
{ {
"mappings": { "mappings": {
"properties": {
"id": { "type": "text" },
"title": { "type": "text" },
"abstract": { "type": "text" },
"author": {
"properties": { "properties": {
"id": { "type": "text" }, "id": { "type": "text" },
"title": { "type": "text"}, "name": { "type": "text" }
"abstract": { "type": "text"},
"author": {
"properties": {
"id": { "type": "text" },
"name": { "type": "text" }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -129,7 +129,7 @@ The API returns the following response:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"acknowledged" : true, "acknowledged" : true,
"shards_acknowledged" : true "shards_acknowledged" : true
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -443,12 +443,12 @@ with `user_id` field values.
---- ----
POST /users/_doc?refresh=wait_for POST /users/_doc?refresh=wait_for
{ {
"user_id" : 12345 "user_id" : 12345
} }
POST /users/_doc?refresh=wait_for POST /users/_doc?refresh=wait_for
{ {
"user_id" : 12346 "user_id" : 12346
} }
---- ----
// TEST[continued] // TEST[continued]

View File

@ -200,27 +200,27 @@ The API returns the following response:
[source,console-response] [source,console-response]
-------------------------------------------------- --------------------------------------------------
{ {
... ...
"_0": { "_0": {
...
"ram_tree": [
{
"description": "postings [PerFieldPostings(format=1)]",
"size_in_bytes": 2696,
"children": [
{
"description": "format 'Lucene50_0' ...",
"size_in_bytes": 2608,
"children" :[ ... ]
},
... ...
"ram_tree": [ ]
{ },
"description": "postings [PerFieldPostings(format=1)]", ...
"size_in_bytes": 2696, ]
"children": [
{
"description": "format 'Lucene50_0' ...",
"size_in_bytes": 2608,
"children" :[ ... ]
},
...
]
},
...
]
} }
... ...
} }
-------------------------------------------------- --------------------------------------------------
// TESTRESPONSE[skip:Response is too verbose to be fully shown in documentation, so we just show the relevant bit and don't test the response.] // TESTRESPONSE[skip:Response is too verbose to be fully shown in documentation, so we just show the relevant bit and don't test the response.]

View File

@ -146,20 +146,20 @@ You can include <<indices-aliases,index aliases>> in an index template.
-------------------------------------------------- --------------------------------------------------
PUT _template/template_1 PUT _template/template_1
{ {
"index_patterns" : ["te*"], "index_patterns" : ["te*"],
"settings" : { "settings" : {
"number_of_shards" : 1 "number_of_shards" : 1
},
"aliases" : {
"alias1" : {},
"alias2" : {
"filter" : {
"term" : {"user" : "kimchy" }
},
"routing" : "kimchy"
}, },
"aliases" : { "{index}-alias" : {} <1>
"alias1" : {}, }
"alias2" : {
"filter" : {
"term" : {"user" : "kimchy" }
},
"routing" : "kimchy"
},
"{index}-alias" : {} <1>
}
} }
-------------------------------------------------- --------------------------------------------------
@ -180,26 +180,26 @@ orders overriding them. For example:
-------------------------------------------------- --------------------------------------------------
PUT /_template/template_1 PUT /_template/template_1
{ {
"index_patterns" : ["te*"], "index_patterns" : ["te*"],
"order" : 0, "order" : 0,
"settings" : { "settings" : {
"number_of_shards" : 1 "number_of_shards" : 1
}, },
"mappings" : { "mappings" : {
"_source" : { "enabled" : false } "_source" : { "enabled" : false }
} }
} }
PUT /_template/template_2 PUT /_template/template_2
{ {
"index_patterns" : ["tes*"], "index_patterns" : ["tes*"],
"order" : 1, "order" : 1,
"settings" : { "settings" : {
"number_of_shards" : 1 "number_of_shards" : 1
}, },
"mappings" : { "mappings" : {
"_source" : { "enabled" : true } "_source" : { "enabled" : true }
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -231,12 +231,12 @@ replace the template without specifying one.
-------------------------------------------------- --------------------------------------------------
PUT /_template/template_1 PUT /_template/template_1
{ {
"index_patterns" : ["myindex-*"], "index_patterns" : ["myindex-*"],
"order" : 0, "order" : 0,
"settings" : { "settings" : {
"number_of_shards" : 1 "number_of_shards" : 1
}, },
"version": 123 "version": 123
} }
-------------------------------------------------- --------------------------------------------------
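The `version` set here can be read back later, for example to decide whether the template needs replacing. A hedged sketch (not part of the diff) that uses `filter_path` to return only the version of the legacy template created above:

[source,console]
----
GET /_template/template_1?filter_path=*.version
----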

View File

@ -13,9 +13,9 @@ default.
-------------------------------------------------- --------------------------------------------------
PUT /twitter/_settings PUT /twitter/_settings
{ {
"index" : { "index" : {
"number_of_replicas" : 2 "number_of_replicas" : 2
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -79,9 +79,9 @@ To revert a setting to the default value, use `null`. For example:
-------------------------------------------------- --------------------------------------------------
PUT /twitter/_settings PUT /twitter/_settings
{ {
"index" : { "index" : {
"refresh_interval" : null "refresh_interval" : null
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -103,9 +103,9 @@ use:
-------------------------------------------------- --------------------------------------------------
PUT /twitter/_settings PUT /twitter/_settings
{ {
"index" : { "index" : {
"refresh_interval" : "-1" "refresh_interval" : "-1"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -120,9 +120,9 @@ the defaults for example):
-------------------------------------------------- --------------------------------------------------
PUT /twitter/_settings PUT /twitter/_settings
{ {
"index" : { "index" : {
"refresh_interval" : "1s" "refresh_interval" : "1s"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
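After flipping settings like `refresh_interval` back and forth, it can be worth confirming what the index currently uses. A minimal sketch, not part of the diff, that reads the live settings for the `twitter` index:

[source,console]
----
GET /twitter/_settings
----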

View File

@ -14,20 +14,20 @@ Deletes an existing <<enrich-policy,enrich policy>> and its
---- ----
PUT /users PUT /users
{ {
"mappings" : { "mappings": {
"properties" : { "properties": {
"email" : { "type" : "keyword" } "email": { "type": "keyword" }
}
} }
}
} }
PUT /_enrich/policy/my-policy PUT /_enrich/policy/my-policy
{ {
"match": { "match": {
"indices": "users", "indices": "users",
"match_field": "email", "match_field": "email",
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"] "enrich_fields": [ "first_name", "last_name", "city", "zip", "state" ]
} }
} }
---- ----
// TESTSETUP // TESTSETUP

View File

@ -101,32 +101,32 @@ The API returns the following response:
[source,console-result] [source,console-result]
---- ----
{ {
"executing_policies": [ "executing_policies": [
{ {
"name": "my-policy", "name": "my-policy",
"task": { "task": {
"id" : 124, "id": 124,
"type" : "direct", "type": "direct",
"action" : "cluster:admin/xpack/enrich/execute", "action": "cluster:admin/xpack/enrich/execute",
"start_time_in_millis" : 1458585884904, "start_time_in_millis": 1458585884904,
"running_time_in_nanos" : 47402, "running_time_in_nanos": 47402,
"cancellable" : false, "cancellable": false,
"parent_task_id" : "oTUltX4IQMOUUVeiohTt8A:123", "parent_task_id": "oTUltX4IQMOUUVeiohTt8A:123",
"headers" : { "headers": {
"X-Opaque-Id" : "123456" "X-Opaque-Id": "123456"
}
}
} }
], }
"coordinator_stats": [ }
{ ],
"node_id": "1sFM8cmSROZYhPxVsiWew", "coordinator_stats": [
"queue_size": 0, {
"remote_requests_current": 0, "node_id": "1sFM8cmSROZYhPxVsiWew",
"remote_requests_total": 0, "queue_size": 0,
"executed_searches_total": 0 "remote_requests_current": 0,
} "remote_requests_total": 0,
] "executed_searches_total": 0
}
]
} }
---- ----
// TESTRESPONSE[s/"executing_policies": \[[^\]]*\]/"executing_policies": $body.$_path/] // TESTRESPONSE[s/"executing_policies": \[[^\]]*\]/"executing_policies": $body.$_path/]

View File

@ -14,23 +14,23 @@ Executes an existing <<enrich-policy,enrich policy>>.
---- ----
PUT /users/_doc/1?refresh PUT /users/_doc/1?refresh
{ {
"email": "mardy.brown@asciidocsmith.com", "email": "mardy.brown@asciidocsmith.com",
"first_name": "Mardy", "first_name": "Mardy",
"last_name": "Brown", "last_name": "Brown",
"city": "New Orleans", "city": "New Orleans",
"county": "Orleans", "county": "Orleans",
"state": "LA", "state": "LA",
"zip": 70116, "zip": 70116,
"web": "mardy.asciidocsmith.com" "web": "mardy.asciidocsmith.com"
} }
PUT /_enrich/policy/my-policy PUT /_enrich/policy/my-policy
{ {
"match": { "match": {
"indices": "users", "indices": "users",
"match_field": "email", "match_field": "email",
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"] "enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
} }
} }
---- ----
// TESTSETUP // TESTSETUP

View File

@ -13,29 +13,29 @@ Returns information about an <<enrich-policy,enrich policy>>.
---- ----
PUT /users PUT /users
{ {
"mappings" : { "mappings" : {
"properties" : { "properties" : {
"email" : { "type" : "keyword" } "email" : { "type" : "keyword" }
}
} }
}
} }
PUT /_enrich/policy/my-policy PUT /_enrich/policy/my-policy
{ {
"match": { "match": {
"indices": "users", "indices": "users",
"match_field": "email", "match_field": "email",
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"] "enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
} }
} }
PUT /_enrich/policy/other-policy PUT /_enrich/policy/other-policy
{ {
"match": { "match": {
"indices": "users", "indices": "users",
"match_field": "email", "match_field": "email",
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"] "enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
} }
} }
---- ----
//// ////
@ -96,24 +96,24 @@ The API returns the following response:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"policies": [ "policies": [
{ {
"config": { "config": {
"match": { "match": {
"name" : "my-policy", "name": "my-policy",
"indices" : ["users"], "indices": [ "users" ],
"match_field" : "email", "match_field": "email",
"enrich_fields" : [ "enrich_fields": [
"first_name", "first_name",
"last_name", "last_name",
"city", "city",
"zip", "zip",
"state" "state"
] ]
}
}
} }
] }
}
]
} }
-------------------------------------------------- --------------------------------------------------
@ -132,40 +132,40 @@ The API returns the following response:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
{ {
"policies": [ "policies": [
{ {
"config": { "config": {
"match": { "match": {
"name" : "my-policy", "name": "my-policy",
"indices" : ["users"], "indices": [ "users" ],
"match_field" : "email", "match_field": "email",
"enrich_fields" : [ "enrich_fields": [
"first_name", "first_name",
"last_name", "last_name",
"city", "city",
"zip", "zip",
"state" "state"
] ]
}
}
},
{
"config": {
"match": {
"name" : "other-policy",
"indices" : ["users"],
"match_field" : "email",
"enrich_fields" : [
"first_name",
"last_name",
"city",
"zip",
"state"
]
}
}
} }
] }
},
{
"config": {
"match": {
"name": "other-policy",
"indices": [ "users" ],
"match_field": "email",
"enrich_fields": [
"first_name",
"last_name",
"city",
"zip",
"state"
]
}
}
}
]
} }
-------------------------------------------------- --------------------------------------------------
// TESTRESPONSE // TESTRESPONSE
@ -185,40 +185,40 @@ The API returns the following response:
[source,console-result] [source,console-result]
-------------------------------------------------- --------------------------------------------------
{ {
"policies": [ "policies": [
{ {
"config": { "config": {
"match": { "match": {
"name" : "my-policy", "name": "my-policy",
"indices" : ["users"], "indices": [ "users" ],
"match_field" : "email", "match_field": "email",
"enrich_fields" : [ "enrich_fields": [
"first_name", "first_name",
"last_name", "last_name",
"city", "city",
"zip", "zip",
"state" "state"
] ]
}
}
},
{
"config": {
"match": {
"name" : "other-policy",
"indices" : ["users"],
"match_field" : "email",
"enrich_fields" : [
"first_name",
"last_name",
"city",
"zip",
"state"
]
}
}
} }
] }
},
{
"config": {
"match": {
"name": "other-policy",
"indices": [ "users" ],
"match_field": "email",
"enrich_fields": [
"first_name",
"last_name",
"city",
"zip",
"state"
]
}
}
}
]
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -13,11 +13,11 @@ Creates an enrich policy.
---- ----
PUT /users PUT /users
{ {
"mappings" : { "mappings": {
"properties" : { "properties": {
"email" : { "type" : "keyword" } "email": { "type": "keyword" }
}
} }
}
} }
---- ----
//// ////
@ -26,11 +26,11 @@ PUT /users
---- ----
PUT /_enrich/policy/my-policy PUT /_enrich/policy/my-policy
{ {
"match": { "match": {
"indices": "users", "indices": "users",
"match_field": "email", "match_field": "email",
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"] "enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
} }
} }
---- ----
// TEST[continued] // TEST[continued]

View File

@ -243,12 +243,11 @@ following:
[source,js] [source,js]
---- ----
{ {
"<enrich_policy_type>": { "<enrich_policy_type>": {
"indices": ["..."], "indices": [ "..." ],
"match_field": "...", "match_field": "...",
"enrich_fields": ["..."], "enrich_fields": [ "..." ],
"query": {...} "query": {... }
}
} }
} }
---- ----
@ -321,16 +320,16 @@ containing at least one `geo_shape` field.
---- ----
PUT /postal_codes PUT /postal_codes
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"location": { "location": {
"type": "geo_shape" "type": "geo_shape"
}, },
"postal_code": { "postal_code": {
"type": "keyword" "type": "keyword"
} }
}
} }
}
} }
---- ----
@ -340,11 +339,11 @@ Use the <<docs-index_,index API>> to index enrich data to this source index.
---- ----
PUT /postal_codes/_doc/1?refresh=wait_for PUT /postal_codes/_doc/1?refresh=wait_for
{ {
"location": { "location": {
"type": "envelope", "type": "envelope",
"coordinates": [[13.0, 53.0], [14.0, 52.0]] "coordinates": [ [ 13.0, 53.0 ], [ 14.0, 52.0 ] ]
}, },
"postal_code": "96598" "postal_code": "96598"
} }
---- ----
// TEST[continued] // TEST[continued]
@ -362,11 +361,11 @@ policy with the `geo_match` policy type. This policy must include:
---- ----
PUT /_enrich/policy/postal_policy PUT /_enrich/policy/postal_policy
{ {
"geo_match": { "geo_match": {
"indices": "postal_codes", "indices": "postal_codes",
"match_field": "location", "match_field": "location",
"enrich_fields": ["location","postal_code"] "enrich_fields": [ "location", "postal_code" ]
} }
} }
---- ----
// TEST[continued] // TEST[continued]
@ -419,9 +418,9 @@ include the `field` specified in your enrich processor.
---- ----
PUT /users/_doc/0?pipeline=postal_lookup PUT /users/_doc/0?pipeline=postal_lookup
{ {
"first_name": "Mardy", "first_name": "Mardy",
"last_name": "Brown", "last_name": "Brown",
"geo_location": "POINT (13.5 52.5)" "geo_location": "POINT (13.5 52.5)"
} }
---- ----
// TEST[continued] // TEST[continued]
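The `postal_lookup` pipeline referenced above is defined earlier in that guide, outside the hunks shown here. As a rough sketch of what such a pipeline looks like, an enrich processor referencing the `postal_policy` might be configured as follows; the `target_field` name and description are assumptions for illustration:

[source,console]
----
PUT /_ingest/pipeline/postal_lookup
{
  "description": "Enrich documents with postal codes",
  "processors": [
    {
      "enrich": {
        "policy_name": "postal_policy",
        "field": "geo_location",
        "target_field": "geo_data"
      }
    }
  ]
}
----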
@ -495,14 +494,14 @@ new document to that index.
---- ----
PUT /users/_doc/1?refresh=wait_for PUT /users/_doc/1?refresh=wait_for
{ {
"email": "mardy.brown@asciidocsmith.com", "email": "mardy.brown@asciidocsmith.com",
"first_name": "Mardy", "first_name": "Mardy",
"last_name": "Brown", "last_name": "Brown",
"city": "New Orleans", "city": "New Orleans",
"county": "Orleans", "county": "Orleans",
"state": "LA", "state": "LA",
"zip": 70116, "zip": 70116,
"web": "mardy.asciidocsmith.com" "web": "mardy.asciidocsmith.com"
} }
---- ----
@ -519,11 +518,11 @@ policy type. This policy must include:
---- ----
PUT /_enrich/policy/users-policy PUT /_enrich/policy/users-policy
{ {
"match": { "match": {
"indices": "users", "indices": "users",
"match_field": "email", "match_field": "email",
"enrich_fields": ["first_name", "last_name", "city", "zip", "state"] "enrich_fields": ["first_name", "last_name", "city", "zip", "state"]
} }
} }
---- ----
// TEST[continued] // TEST[continued]
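Before a policy like this can be used in an enrich processor, it has to be executed so that its enrich index is built. A minimal sketch of that step (not part of the diff above):

[source,console]
----
PUT /_enrich/policy/users-policy/_execute
----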

View File

@ -35,16 +35,16 @@ PUT circles
PUT _ingest/pipeline/polygonize_circles PUT _ingest/pipeline/polygonize_circles
{ {
"description": "translate circle to polygon", "description": "translate circle to polygon",
"processors": [ "processors": [
{ {
"circle": { "circle": {
"field": "circle", "field": "circle",
"error_distance": 28.0, "error_distance": 28.0,
"shape_type": "geo_shape" "shape_type": "geo_shape"
}
} }
] }
]
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -230,22 +230,22 @@ PUT my_ip_locations/_doc/1?refresh=true&pipeline=geoip
GET /my_ip_locations/_search GET /my_ip_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_distance" : { "geo_distance": {
"distance" : "1m", "distance": "1m",
"geoip.location" : { "geoip.location": {
"lon" : -97.822, "lon": -97.822,
"lat" : 37.751 "lat": 37.751
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]

View File

@ -50,17 +50,17 @@ was provided in the original index request:
-------------------------------------------------- --------------------------------------------------
PUT _ingest/pipeline/my_index PUT _ingest/pipeline/my_index
{ {
"description": "use index:my_index and type:_doc", "description": "use index:my_index",
"processors": [ "processors": [
{ {
"script": { "script": {
"source": """ "source": """
ctx._index = 'my_index'; ctx._index = 'my_index';
ctx._type = '_doc'; ctx._type = '_doc';
""" """
}
} }
] }
]
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -15,30 +15,30 @@ For example:
-------------------------------------------------- --------------------------------------------------
PUT my_index/_doc/1 PUT my_index/_doc/1
{ {
"names": [ "John Abraham", "Lincoln Smith"] "names": [ "John Abraham", "Lincoln Smith"]
} }
GET my_index/_search GET my_index/_search
{ {
"query": { "query": {
"match_phrase": { "match_phrase": {
"names": { "names": {
"query": "Abraham Lincoln" <1> "query": "Abraham Lincoln" <1>
} }
}
} }
}
} }
GET my_index/_search GET my_index/_search
{ {
"query": { "query": {
"match_phrase": { "match_phrase": {
"names": { "names": {
"query": "Abraham Lincoln", "query": "Abraham Lincoln",
"slop": 101 <2> "slop": 101 <2>
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -65,16 +65,16 @@ PUT my_index
PUT my_index/_doc/1 PUT my_index/_doc/1
{ {
"names": [ "John Abraham", "Lincoln Smith"] "names": [ "John Abraham", "Lincoln Smith"]
} }
GET my_index/_search GET my_index/_search
{ {
"query": { "query": {
"match_phrase": { "match_phrase": {
"names": "Abraham Lincoln" <2> "names": "Abraham Lincoln" <2>
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -212,13 +212,13 @@ the cell right next to it -- even though the shape is very close to the point.
-------------------------------------------------- --------------------------------------------------
PUT /example PUT /example
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"location": { "location": {
"type": "geo_shape" "type": "geo_shape"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TESTSETUP // TESTSETUP
@ -304,10 +304,10 @@ API. The following is an example of a point in GeoJSON.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "point", "type" : "point",
"coordinates" : [-77.03653, 38.897676] "coordinates" : [-77.03653, 38.897676]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -317,7 +317,7 @@ The following is an example of a point in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "POINT (-77.03653 38.897676)" "location" : "POINT (-77.03653 38.897676)"
} }
-------------------------------------------------- --------------------------------------------------
@ -334,10 +334,10 @@ following is an example of a LineString in GeoJSON.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "linestring", "type" : "linestring",
"coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]] "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -347,7 +347,7 @@ The following is an example of a LineString in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "LINESTRING (-77.03653 38.897676, -77.009051 38.889939)" "location" : "LINESTRING (-77.03653 38.897676, -77.009051 38.889939)"
} }
-------------------------------------------------- --------------------------------------------------
@ -366,12 +366,12 @@ closed). The following is an example of a Polygon in GeoJSON.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"coordinates" : [ "coordinates" : [
[ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -381,7 +381,7 @@ The following is an example of a Polygon in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))" "location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))"
} }
-------------------------------------------------- --------------------------------------------------
@ -393,13 +393,13 @@ of a polygon with a hole:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"coordinates" : [ "coordinates" : [
[ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ],
[ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ] [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -409,7 +409,7 @@ The following is an example of a Polygon with a hole in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))" "location" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2))"
} }
-------------------------------------------------- --------------------------------------------------
@ -436,13 +436,13 @@ crosses the dateline.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"coordinates" : [ "coordinates" : [
[ [-177.0, 10.0], [176.0, 15.0], [172.0, 0.0], [176.0, -15.0], [-177.0, -10.0], [-177.0, 10.0] ], [ [-177.0, 10.0], [176.0, 15.0], [172.0, 0.0], [176.0, -15.0], [-177.0, -10.0], [-177.0, 10.0] ],
[ [178.2, 8.2], [-178.8, 8.2], [-180.8, -8.8], [178.2, 8.8] ] [ [178.2, 8.2], [-178.8, 8.2], [-180.8, -8.8], [178.2, 8.8] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[catch:/mapper_parsing_exception/] // TEST[catch:/mapper_parsing_exception/]
@ -455,13 +455,13 @@ overriding the orientation on a document:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"orientation" : "clockwise", "orientation" : "clockwise",
"coordinates" : [ "coordinates" : [
[ [100.0, 0.0], [100.0, 1.0], [101.0, 1.0], [101.0, 0.0], [100.0, 0.0] ] [ [100.0, 0.0], [100.0, 1.0], [101.0, 1.0], [101.0, 0.0], [100.0, 0.0] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -475,12 +475,12 @@ The following is an example of a list of geojson points:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "multipoint", "type" : "multipoint",
"coordinates" : [ "coordinates" : [
[102.0, 2.0], [103.0, 2.0] [102.0, 2.0], [103.0, 2.0]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -490,7 +490,7 @@ The following is an example of a list of WKT points:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "MULTIPOINT (102.0 2.0, 103.0 2.0)" "location" : "MULTIPOINT (102.0 2.0, 103.0 2.0)"
} }
-------------------------------------------------- --------------------------------------------------
@ -504,14 +504,14 @@ The following is an example of a list of geojson linestrings:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "multilinestring", "type" : "multilinestring",
"coordinates" : [ "coordinates" : [
[ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0] ], [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0] ],
[ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0] ], [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0] ],
[ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8] ] [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -521,7 +521,7 @@ The following is an example of a list of WKT linestrings:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "MULTILINESTRING ((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0), (100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8))" "location" : "MULTILINESTRING ((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0), (100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8))"
} }
-------------------------------------------------- --------------------------------------------------
@ -535,14 +535,14 @@ The following is an example of a list of geojson polygons (second polygon contai
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "multipolygon", "type" : "multipolygon",
"coordinates" : [ "coordinates" : [
[ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ], [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ],
[ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]],
[[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ] [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -552,7 +552,7 @@ The following is an example of a list of WKT polygons (second polygon contains a
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "MULTIPOLYGON (((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0, 102.0 2.0)), ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2)))" "location" : "MULTIPOLYGON (((102.0 2.0, 103.0 2.0, 103.0 3.0, 102.0 3.0, 102.0 2.0)), ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0), (100.2 0.2, 100.8 0.2, 100.8 0.8, 100.2 0.8, 100.2 0.2)))"
} }
-------------------------------------------------- --------------------------------------------------
@ -566,19 +566,19 @@ The following is an example of a collection of geojson geometry objects:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type": "geometrycollection", "type": "geometrycollection",
"geometries": [ "geometries": [
{ {
"type": "point", "type": "point",
"coordinates": [100.0, 0.0] "coordinates": [100.0, 0.0]
}, },
{ {
"type": "linestring", "type": "linestring",
"coordinates": [ [101.0, 0.0], [102.0, 1.0] ] "coordinates": [ [101.0, 0.0], [102.0, 1.0] ]
} }
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -588,7 +588,7 @@ The following is an example of a collection of WKT geometry objects:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "GEOMETRYCOLLECTION (POINT (100.0 0.0), LINESTRING (101.0 0.0, 102.0 1.0))" "location" : "GEOMETRYCOLLECTION (POINT (100.0 0.0), LINESTRING (101.0 0.0, 102.0 1.0))"
} }
-------------------------------------------------- --------------------------------------------------
@ -604,10 +604,10 @@ bounding rectangle in the format `[[minLon, maxLat], [maxLon, minLat]]`:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "envelope", "type" : "envelope",
"coordinates" : [ [100.0, 1.0], [101.0, 0.0] ] "coordinates" : [ [100.0, 1.0], [101.0, 0.0] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -619,7 +619,7 @@ The following is an example of an envelope using the WKT BBOX format:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "BBOX (100.0, 102.0, 2.0, 0.0)" "location" : "BBOX (100.0, 102.0, 2.0, 0.0)"
} }
-------------------------------------------------- --------------------------------------------------
@ -636,11 +636,11 @@ a `POLYGON`.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "circle", "type" : "circle",
"coordinates" : [101.0, 1.0], "coordinates" : [101.0, 1.0],
"radius" : "100m" "radius" : "100m"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[skip:not supported in default] // TEST[skip:not supported in default]
View File
@ -178,80 +178,80 @@ Will return:
[source,console-result]
--------------------------------------------------
{
  ...,
  "hits": {
    "total": {
      "value": 4,
      "relation": "eq"
    },
    "max_score": null,
    "hits": [
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "1",
        "_score": null,
        "_source": {
          "my_id": "1",
          "text": "This is a question",
          "my_join_field": "question" <1>
        },
        "sort": [
          "1"
        ]
      },
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "2",
        "_score": null,
        "_source": {
          "my_id": "2",
          "text": "This is another question",
          "my_join_field": "question" <2>
        },
        "sort": [
          "2"
        ]
      },
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "3",
        "_score": null,
        "_routing": "1",
        "_source": {
          "my_id": "3",
          "text": "This is an answer",
          "my_join_field": {
            "name": "answer", <3>
            "parent": "1" <4>
          }
        },
        "sort": [
          "3"
        ]
      },
      {
        "_index": "my_index",
        "_type": "_doc",
        "_id": "4",
        "_score": null,
        "_routing": "1",
        "_source": {
          "my_id": "4",
          "text": "This is another answer",
          "my_join_field": {
            "name": "answer",
            "parent": "1"
          }
        },
        "sort": [
          "4"
        ]
      }
    ]
  }
}
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./"timed_out": false, "took": $body.took, "_shards": $body._shards/]
View File
@ -20,16 +20,16 @@ If the following mapping configures the `percolator` field type for the
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "properties": {
      "query": {
        "type": "percolator"
      },
      "field": {
        "type": "text"
      }
    }
  }
}
--------------------------------------------------
// TESTSETUP
@ -40,11 +40,11 @@ Then you can index a query:
--------------------------------------------------
PUT my_index/_doc/match_value
{
  "query": {
    "match": {
      "field": "value"
    }
  }
}
--------------------------------------------------
View File
@ -77,13 +77,13 @@ for indices created with ElasticSearch 7.5.0 or higher.
--------------------------------------------------
PUT /example
{
  "mappings": {
    "properties": {
      "geometry": {
        "type": "shape"
      }
    }
  }
}
--------------------------------------------------
// TESTSETUP
@ -141,10 +141,10 @@ following is an example of a point in GeoJSON.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "point", "type" : "point",
"coordinates" : [-377.03653, 389.897676] "coordinates" : [-377.03653, 389.897676]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -154,7 +154,7 @@ The following is an example of a point in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "POINT (-377.03653 389.897676)" "location" : "POINT (-377.03653 389.897676)"
} }
-------------------------------------------------- --------------------------------------------------
@ -171,10 +171,10 @@ following is an example of a LineString in GeoJSON.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "linestring", "type" : "linestring",
"coordinates" : [[-377.03653, 389.897676], [-377.009051, 389.889939]] "coordinates" : [[-377.03653, 389.897676], [-377.009051, 389.889939]]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -184,7 +184,7 @@ The following is an example of a LineString in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" "location" : "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)"
} }
-------------------------------------------------- --------------------------------------------------
@ -200,12 +200,12 @@ closed). The following is an example of a Polygon in GeoJSON.
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"coordinates" : [ "coordinates" : [
[ [1000.0, -1001.0], [1001.0, -1001.0], [1001.0, -1000.0], [1000.0, -1000.0], [1000.0, -1001.0] ] [ [1000.0, -1001.0], [1001.0, -1001.0], [1001.0, -1000.0], [1000.0, -1000.0], [1000.0, -1001.0] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -215,7 +215,7 @@ The following is an example of a Polygon in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "POLYGON ((1000.0 -1001.0, 1001.0 -1001.0, 1001.0 -1000.0, 1000.0 -1000.0, 1000.0 -1001.0))" "location" : "POLYGON ((1000.0 -1001.0, 1001.0 -1001.0, 1001.0 -1000.0, 1000.0 -1000.0, 1000.0 -1001.0))"
} }
-------------------------------------------------- --------------------------------------------------
@ -227,13 +227,13 @@ of a polygon with a hole:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"coordinates" : [ "coordinates" : [
[ [1000.0, -1001.0], [1001.0, -1001.0], [1001.0, -1000.0], [1000.0, -1000.0], [1000.0, -1001.0] ], [ [1000.0, -1001.0], [1001.0, -1001.0], [1001.0, -1000.0], [1000.0, -1000.0], [1000.0, -1001.0] ],
[ [1000.2, -1001.2], [1000.8, -1001.2], [1000.8, -1001.8], [1000.2, -1001.8], [1000.2, -1001.2] ] [ [1000.2, -1001.2], [1000.8, -1001.2], [1000.8, -1001.8], [1000.2, -1001.8], [1000.2, -1001.2] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -243,7 +243,7 @@ The following is an example of a Polygon with a hole in WKT:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "POLYGON ((1000.0 1000.0, 1001.0 1000.0, 1001.0 1001.0, 1000.0 1001.0, 1000.0 1000.0), (1000.2 1000.2, 1000.8 1000.2, 1000.8 1000.8, 1000.2 1000.8, 1000.2 1000.2))" "location" : "POLYGON ((1000.0 1000.0, 1001.0 1000.0, 1001.0 1001.0, 1000.0 1001.0, 1000.0 1000.0), (1000.2 1000.2, 1000.8 1000.2, 1000.8 1000.8, 1000.2 1000.8, 1000.2 1000.2))"
} }
-------------------------------------------------- --------------------------------------------------
@ -265,13 +265,13 @@ The following is an example of overriding the `orientation` parameters on a docu
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "polygon", "type" : "polygon",
"orientation" : "clockwise", "orientation" : "clockwise",
"coordinates" : [ "coordinates" : [
[ [1000.0, 1000.0], [1000.0, 1001.0], [1001.0, 1001.0], [1001.0, 1000.0], [1000.0, 1000.0] ] [ [1000.0, 1000.0], [1000.0, 1001.0], [1001.0, 1001.0], [1001.0, 1000.0], [1000.0, 1000.0] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -285,12 +285,12 @@ The following is an example of a list of geojson points:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "multipoint", "type" : "multipoint",
"coordinates" : [ "coordinates" : [
[1002.0, 1002.0], [1003.0, 2000.0] [1002.0, 1002.0], [1003.0, 2000.0]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -300,7 +300,7 @@ The following is an example of a list of WKT points:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "MULTIPOINT (1002.0 2000.0, 1003.0 2000.0)" "location" : "MULTIPOINT (1002.0 2000.0, 1003.0 2000.0)"
} }
-------------------------------------------------- --------------------------------------------------
@ -314,14 +314,14 @@ The following is an example of a list of geojson linestrings:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "multilinestring", "type" : "multilinestring",
"coordinates" : [ "coordinates" : [
[ [1002.0, 200.0], [1003.0, 200.0], [1003.0, 300.0], [1002.0, 300.0] ], [ [1002.0, 200.0], [1003.0, 200.0], [1003.0, 300.0], [1002.0, 300.0] ],
[ [1000.0, 100.0], [1001.0, 100.0], [1001.0, 100.0], [1000.0, 100.0] ], [ [1000.0, 100.0], [1001.0, 100.0], [1001.0, 100.0], [1000.0, 100.0] ],
[ [1000.2, 100.2], [1000.8, 100.2], [1000.8, 100.8], [1000.2, 100.8] ] [ [1000.2, 100.2], [1000.8, 100.2], [1000.8, 100.8], [1000.2, 100.8] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -331,7 +331,7 @@ The following is an example of a list of WKT linestrings:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "MULTILINESTRING ((1002.0 200.0, 1003.0 200.0, 1003.0 300.0, 1002.0 300.0), (1000.0 100.0, 1001.0 100.0, 1001.0 100.0, 1000.0 100.0), (1000.2 0.2, 1000.8 100.2, 1000.8 100.8, 1000.2 100.8))" "location" : "MULTILINESTRING ((1002.0 200.0, 1003.0 200.0, 1003.0 300.0, 1002.0 300.0), (1000.0 100.0, 1001.0 100.0, 1001.0 100.0, 1000.0 100.0), (1000.2 0.2, 1000.8 100.2, 1000.8 100.8, 1000.2 100.8))"
} }
-------------------------------------------------- --------------------------------------------------
@ -345,14 +345,14 @@ The following is an example of a list of geojson polygons (second polygon contai
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "multipolygon", "type" : "multipolygon",
"coordinates" : [ "coordinates" : [
[ [[1002.0, 200.0], [1003.0, 200.0], [1003.0, 300.0], [1002.0, 300.0], [1002.0, 200.0]] ], [ [[1002.0, 200.0], [1003.0, 200.0], [1003.0, 300.0], [1002.0, 300.0], [1002.0, 200.0]] ],
[ [[1000.0, 200.0], [1001.0, 100.0], [1001.0, 100.0], [1000.0, 100.0], [1000.0, 100.0]], [ [[1000.0, 200.0], [1001.0, 100.0], [1001.0, 100.0], [1000.0, 100.0], [1000.0, 100.0]],
[[1000.2, 200.2], [1000.8, 100.2], [1000.8, 100.8], [1000.2, 100.8], [1000.2, 100.2]] ] [[1000.2, 200.2], [1000.8, 100.2], [1000.8, 100.8], [1000.2, 100.8], [1000.2, 100.2]] ]
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -362,7 +362,7 @@ The following is an example of a list of WKT polygons (second polygon contains a
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "MULTIPOLYGON (((1002.0 200.0, 1003.0 200.0, 1003.0 300.0, 1002.0 300.0, 102.0 200.0)), ((1000.0 100.0, 1001.0 100.0, 1001.0 100.0, 1000.0 100.0, 1000.0 100.0), (1000.2 100.2, 1000.8 100.2, 1000.8 100.8, 1000.2 100.8, 1000.2 100.2)))" "location" : "MULTIPOLYGON (((1002.0 200.0, 1003.0 200.0, 1003.0 300.0, 1002.0 300.0, 102.0 200.0)), ((1000.0 100.0, 1001.0 100.0, 1001.0 100.0, 1000.0 100.0, 1000.0 100.0), (1000.2 100.2, 1000.8 100.2, 1000.8 100.8, 1000.2 100.8, 1000.2 100.2)))"
} }
-------------------------------------------------- --------------------------------------------------
@ -376,19 +376,19 @@ The following is an example of a collection of geojson geometry objects:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type": "geometrycollection", "type": "geometrycollection",
"geometries": [ "geometries": [
{ {
"type": "point", "type": "point",
"coordinates": [1000.0, 100.0] "coordinates": [1000.0, 100.0]
}, },
{ {
"type": "linestring", "type": "linestring",
"coordinates": [ [1001.0, 100.0], [1002.0, 100.0] ] "coordinates": [ [1001.0, 100.0], [1002.0, 100.0] ]
} }
] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -398,7 +398,7 @@ The following is an example of a collection of WKT geometry objects:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "GEOMETRYCOLLECTION (POINT (1000.0 100.0), LINESTRING (1001.0 100.0, 1002.0 100.0))" "location" : "GEOMETRYCOLLECTION (POINT (1000.0 100.0), LINESTRING (1001.0 100.0, 1002.0 100.0))"
} }
-------------------------------------------------- --------------------------------------------------
@ -413,10 +413,10 @@ bounding rectangle in the format `[[minX, maxY], [maxX, minY]]`:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : { "location" : {
"type" : "envelope", "type" : "envelope",
"coordinates" : [ [1000.0, 100.0], [1001.0, 100.0] ] "coordinates" : [ [1000.0, 100.0], [1001.0, 100.0] ]
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -428,7 +428,7 @@ The following is an example of an envelope using the WKT BBOX format:
-------------------------------------------------- --------------------------------------------------
POST /example/_doc POST /example/_doc
{ {
"location" : "BBOX (1000.0, 1002.0, 2000.0, 1000.0)" "location" : "BBOX (1000.0, 1002.0, 2000.0, 1000.0)"
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -60,26 +60,26 @@ no effect on the estimation. +
-------------------------------------------------- --------------------------------------------------
POST _ml/anomaly_detectors/_estimate_model_memory POST _ml/anomaly_detectors/_estimate_model_memory
{ {
"analysis_config": { "analysis_config": {
"bucket_span": "5m", "bucket_span": "5m",
"detectors": [ "detectors": [
{ {
"function": "sum", "function": "sum",
"field_name": "bytes", "field_name": "bytes",
"by_field_name": "status", "by_field_name": "status",
"partition_field_name": "app" "partition_field_name": "app"
} }
], ],
"influencers": [ "source_ip", "dest_ip" ] "influencers": [ "source_ip", "dest_ip" ]
}, },
"overall_cardinality": { "overall_cardinality": {
"status": 10, "status": 10,
"app": 50 "app": 50
}, },
"max_bucket_cardinality": { "max_bucket_cardinality": {
"source_ip": 300, "source_ip": 300,
"dest_ip": 30 "dest_ip": 30
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[skip:needs-licence] // TEST[skip:needs-licence]
View File
@ -41,20 +41,20 @@ The following example validates job configuration information:
-------------------------------------------------- --------------------------------------------------
POST _ml/anomaly_detectors/_validate POST _ml/anomaly_detectors/_validate
{ {
"description" : "Unusual response times by airlines", "description": "Unusual response times by airlines",
"analysis_config" : { "analysis_config": {
"bucket_span": "300S", "bucket_span": "300S",
"detectors" :[ "detectors": [
{ {
"function": "metric", "function": "metric",
"field_name": "responsetime", "field_name": "responsetime",
"by_field_name": "airline"}], "by_field_name": "airline" } ],
"influencers": [ "airline" ] "influencers": [ "airline" ]
}, },
"data_description" : { "data_description": {
"time_field": "time", "time_field": "time",
"time_format": "yyyy-MM-dd'T'HH:mm:ssX" "time_format": "yyyy-MM-dd'T'HH:mm:ssX"
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[skip:needs-licence] // TEST[skip:needs-licence]
View File
@ -189,27 +189,27 @@ The API returns the following results:
[source,console-result]
----
{
  "count": 1,
  "data_frame_analytics": [
    {
      "id": "loganalytics",
      "source": {
        "index": "logdata",
        "query": {
          "match_all": {}
        }
      },
      "dest": {
        "index": "logdata_out",
        "results_field": "ml"
      },
      "analysis": {
        "outlier_detection": {}
      },
      "model_memory_limit": "1gb",
      "create_time": 1562265491319,
      "version": "8.0.0"
    }
  ]
}
----
View File
@ -494,30 +494,30 @@ The API returns the following result:
[source,console-result] [source,console-result]
---- ----
{ {
"id": "loganalytics", "id": "loganalytics",
"description": "Outlier detection on log data", "description": "Outlier detection on log data",
"source": { "source": {
"index": ["logdata"], "index": ["logdata"],
"query": { "query": {
"match_all": {} "match_all": {}
} }
}, },
"dest": { "dest": {
"index": "logdata_out", "index": "logdata_out",
"results_field": "ml" "results_field": "ml"
}, },
"analysis": { "analysis": {
"outlier_detection": { "outlier_detection": {
"compute_feature_influence": true, "compute_feature_influence": true,
"outlier_fraction": 0.05, "outlier_fraction": 0.05,
"standardization_enabled": true "standardization_enabled": true
} }
}, },
"model_memory_limit": "1gb", "model_memory_limit": "1gb",
"create_time" : 1562265491319, "create_time" : 1562265491319,
"version" : "7.6.0", "version" : "7.6.0",
"allow_lazy_start" : false, "allow_lazy_start" : false,
"max_num_threads": 1 "max_num_threads": 1
} }
---- ----
// TESTRESPONSE[s/1562265491319/$body.$_path/] // TESTRESPONSE[s/1562265491319/$body.$_path/]
View File
@ -29,12 +29,12 @@ and the number of replicas to two.
---------------------------------- ----------------------------------
PUT /_template/custom_monitoring PUT /_template/custom_monitoring
{ {
"index_patterns": ".monitoring-*", "index_patterns": ".monitoring-*",
"order": 1, "order": 1,
"settings": { "settings": {
"number_of_shards": 5, "number_of_shards": 5,
"number_of_replicas": 2 "number_of_replicas": 2
} }
} }
---------------------------------- ----------------------------------
View File
@ -40,12 +40,12 @@ Guidelines
---- ----
GET _search GET _search
{ {
"query": { "query": {
"sample": { "sample": {
"foo": "baz", "foo": "baz",
"bar": true "bar": true
}
} }
}
} }
---- ----
// TEST[skip: REMOVE THIS COMMENT.] // TEST[skip: REMOVE THIS COMMENT.]
@ -107,12 +107,12 @@ For example:
---- ----
GET my_time_series_index/_search GET my_time_series_index/_search
{ {
"query": { "query": {
"sample": { "sample": {
"foo": "baz", "foo": "baz",
"bar": false "bar": false
}
} }
}
} }
---- ----
// TEST[skip: REMOVE THIS COMMENT.] // TEST[skip: REMOVE THIS COMMENT.]
View File
@ -18,21 +18,21 @@ excluding them from the search results.
----
GET /_search
{
  "query": {
    "boosting": {
      "positive": {
        "term": {
          "text": "apple"
        }
      },
      "negative": {
        "term": {
          "text": "pie tart fruit crumble tree"
        }
      },
      "negative_boost": 0.5
    }
  }
}
----
View File
@ -12,14 +12,14 @@ parameter value.
----
GET /_search
{
  "query": {
    "constant_score": {
      "filter": {
        "term": { "user": "kimchy" }
      },
      "boost": 1.2
    }
  }
}
----
View File
@ -21,15 +21,15 @@ You can use the `dis_max` to search for a term in fields mapped with different
----
GET /_search
{
  "query": {
    "dis_max": {
      "queries": [
        { "term": { "title": "Quick pets" } },
        { "term": { "body": "Quick pets" } }
      ],
      "tie_breaker": 0.7
    }
  }
}
----
View File
@ -20,11 +20,11 @@ An indexed value may not exist for a document's field due to a variety of reason
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"exists": { "exists": {
"field": "user" "field": "user"
}
} }
}
} }
---- ----
@ -56,14 +56,14 @@ the `user` field.
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"bool": { "bool": {
"must_not": { "must_not": {
"exists": { "exists": {
"field": "user" "field": "user"
}
}
} }
}
} }
}
} }
---- ----
View File
@ -19,14 +19,14 @@ by the query.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"function_score": { "function_score": {
"query": { "match_all": {} }, "query": { "match_all": {} },
"boost": "5", "boost": "5",
"random_score": {}, <1> "random_score": {}, <1>
"boost_mode":"multiply" "boost_mode": "multiply"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -41,27 +41,27 @@ given filtering query
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": { "match_all": {} },
      "boost": "5", <1>
      "functions": [
        {
          "filter": { "match": { "test": "bar" } },
          "random_score": {}, <2>
          "weight": 23
        },
        {
          "filter": { "match": { "test": "cat" } },
          "weight": 42
        }
      ],
      "max_boost": 42,
      "score_mode": "max",
      "boost_mode": "multiply",
      "min_score": 42
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -137,18 +137,18 @@ simple sample:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": {
        "match": { "message": "elasticsearch" }
      },
      "script_score": {
        "script": {
          "source": "Math.log(2 + doc['likes'].value)"
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -175,22 +175,22 @@ same script, and provide parameters to it:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "query": {
        "match": { "message": "elasticsearch" }
      },
      "script_score": {
        "script": {
          "params": {
            "a": 5,
            "b": 1.2
          },
          "source": "params.a / Math.pow(params.b, doc['likes'].value)"
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -241,14 +241,14 @@ a lot of memory.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"function_score": { "function_score": {
"random_score": { "random_score": {
"seed": 10, "seed": 10,
"field": "_seq_no" "field": "_seq_no"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -269,16 +269,16 @@ doing so would look like:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"function_score": { "function_score": {
"field_value_factor": { "field_value_factor": {
"field": "likes", "field": "likes",
"factor": 1.2, "factor": 1.2,
"modifier": "sqrt", "modifier": "sqrt",
"missing": 1 "missing": 1
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
@ -380,18 +380,18 @@ days, weeks, and so on. Example:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "gauss": {
        "date": {
          "origin": "2013-09-17", <1>
          "scale": "10d",
          "offset": "5d", <2>
          "decay": 0.5 <2>
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[setup:twitter]
@ -578,34 +578,34 @@ the request would look like this:
--------------------------------------------------
GET /_search
{
  "query": {
    "function_score": {
      "functions": [
        {
          "gauss": {
            "price": {
              "origin": "0",
              "scale": "20"
            }
          }
        },
        {
          "gauss": {
            "location": {
              "origin": "11, 12",
              "scale": "2km"
            }
          }
        }
      ],
      "query": {
        "match": {
          "properties": "balcony"
        }
      },
      "score_mode": "multiply"
    }
  }
}
--------------------------------------------------
View File
@ -29,13 +29,13 @@ The query then returns exact matches for each expansion.
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"fuzzy": { "fuzzy": {
"user": { "user": {
"value": "ki" "value": "ki"
} }
}
} }
}
} }
---- ----
@ -46,18 +46,18 @@ GET /_search
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"fuzzy": { "fuzzy": {
"user": { "user": {
"value": "ki", "value": "ki",
"fuzziness": "AUTO", "fuzziness": "AUTO",
"max_expansions": 50, "max_expansions": 50,
"prefix_length": 0, "prefix_length": 0,
"transpositions": true, "transpositions": true,
"rewrite": "constant_score" "rewrite": "constant_score"
} }
}
} }
}
} }
---- ----
View File
@ -11,27 +11,27 @@ bounding box. Assuming the following indexed document:
--------------------------------------------------
PUT /my_locations
{
  "mappings": {
    "properties": {
      "pin": {
        "properties": {
          "location": {
            "type": "geo_point"
          }
        }
      }
    }
  }
}
PUT /my_locations/_doc/1
{
  "pin": {
    "location": {
      "lat": 40.12,
      "lon": -71.34
    }
  }
}
--------------------------------------------------
// TESTSETUP
@ -43,27 +43,27 @@ Then the following simple query can be executed with a
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": {
              "lat": 40.73,
              "lon": -74.1
            },
            "bottom_right": {
              "lat": 40.01,
              "lon": -71.12
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -98,27 +98,27 @@ representations of the geo point, the filter can accept it as well:
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": {
              "lat": 40.73,
              "lon": -74.1
            },
            "bottom_right": {
              "lat": 40.01,
              "lon": -71.12
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -132,21 +132,21 @@ conform with http://geojson.org/[GeoJSON].
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": [ -74.1, 40.73 ],
            "bottom_right": [ -71.12, 40.01 ]
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -159,21 +159,21 @@ Format in `lat,lon`.
-------------------------------------------------- --------------------------------------------------
GET my_locations/_search GET my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_bounding_box" : { "geo_bounding_box": {
"pin.location" : { "pin.location": {
"top_left" : "40.73, -74.1", "top_left": "40.73, -74.1",
"bottom_right" : "40.01, -71.12" "bottom_right": "40.01, -71.12"
} }
} }
} }
} }
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -184,20 +184,20 @@ GET my_locations/_search
-------------------------------------------------- --------------------------------------------------
GET my_locations/_search GET my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_bounding_box" : { "geo_bounding_box": {
"pin.location" : { "pin.location": {
"wkt" : "BBOX (-74.1, -71.12, 40.73, 40.01)" "wkt": "BBOX (-74.1, -71.12, 40.73, 40.01)"
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -208,21 +208,21 @@ GET my_locations/_search
-------------------------------------------------- --------------------------------------------------
GET my_locations/_search GET my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_bounding_box" : { "geo_bounding_box": {
"pin.location" : { "pin.location": {
"top_left" : "dr5r9ydj2y73", "top_left": "dr5r9ydj2y73",
"bottom_right" : "drj7teegpus6" "bottom_right": "drj7teegpus6"
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -242,14 +242,14 @@ geohash the geohash can be specified in both `top_left` and
-------------------------------------------------- --------------------------------------------------
GET my_locations/_search GET my_locations/_search
{ {
"query": { "query": {
"geo_bounding_box" : { "geo_bounding_box": {
"pin.location" : { "pin.location": {
"top_left" : "dr", "top_left": "dr",
"bottom_right" : "dr" "bottom_right": "dr"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -271,23 +271,23 @@ values separately.
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top": 40.73,
            "left": -74.1,
            "bottom": 40.01,
            "right": -71.12
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -320,28 +320,28 @@ are not supported. Here is an example:
--------------------------------------------------
GET my_locations/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_bounding_box": {
          "pin.location": {
            "top_left": {
              "lat": 40.73,
              "lon": -74.1
            },
            "bottom_right": {
              "lat": 40.10,
              "lon": -71.12
            }
          },
          "type": "indexed"
        }
      }
    }
  }
}
--------------------------------------------------
View File
@ -12,27 +12,27 @@ document:
--------------------------------------------------
PUT /my_locations
{
  "mappings": {
    "properties": {
      "pin": {
        "properties": {
          "location": {
            "type": "geo_point"
          }
        }
      }
    }
  }
}
PUT /my_locations/_doc/1
{
  "pin": {
    "location": {
      "lat": 40.12,
      "lon": -71.34
    }
  }
}
--------------------------------------------------
// TESTSETUP
@ -45,22 +45,22 @@ filter:
-------------------------------------------------- --------------------------------------------------
GET /my_locations/_search GET /my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_distance" : { "geo_distance": {
"distance" : "200km", "distance": "200km",
"pin.location" : { "pin.location": {
"lat" : 40, "lat": 40,
"lon" : -70 "lon": -70
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -77,22 +77,22 @@ representations of the geo point, the filter can accept it as well:
-------------------------------------------------- --------------------------------------------------
GET /my_locations/_search GET /my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_distance" : { "geo_distance": {
"distance" : "12km", "distance": "12km",
"pin.location" : { "pin.location": {
"lat" : 40, "lat": 40,
"lon" : -70 "lon": -70
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -106,19 +106,19 @@ conform with http://geojson.org/[GeoJSON].
-------------------------------------------------- --------------------------------------------------
GET /my_locations/_search GET /my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_distance" : { "geo_distance": {
"distance" : "12km", "distance": "12km",
"pin.location" : [-70, 40] "pin.location": [ -70, 40 ]
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -132,19 +132,19 @@ Format in `lat,lon`.
-------------------------------------------------- --------------------------------------------------
GET /my_locations/_search GET /my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_distance" : { "geo_distance": {
"distance" : "12km", "distance": "12km",
"pin.location" : "40,-70" "pin.location": "40,-70"
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -155,19 +155,19 @@ GET /my_locations/_search
-------------------------------------------------- --------------------------------------------------
GET /my_locations/_search GET /my_locations/_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_distance" : { "geo_distance": {
"distance" : "12km", "distance": "12km",
"pin.location" : "drm3btev3e86" "pin.location": "drm3btev3e86"
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -11,24 +11,24 @@ points. Here is an example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_polygon" : { "geo_polygon": {
"person.location" : { "person.location": {
"points" : [ "points": [
{"lat" : 40, "lon" : -70}, { "lat": 40, "lon": -70 },
{"lat" : 30, "lon" : -80}, { "lat": 30, "lon": -80 },
{"lat" : 20, "lon" : -90} { "lat": 20, "lon": -90 }
] ]
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -60,24 +60,24 @@ conform with http://geojson.org/[GeoJSON].
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_polygon" : { "geo_polygon": {
"person.location" : { "person.location": {
"points" : [ "points": [
[-70, 40], [ -70, 40 ],
[-80, 30], [ -80, 30 ],
[-90, 20] [ -90, 20 ]
] ]
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -90,24 +90,24 @@ Format in `lat,lon`.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_polygon" : { "geo_polygon": {
"person.location" : { "person.location": {
"points" : [ "points": [
"40, -70", "40, -70",
"30, -80", "30, -80",
"20, -90" "20, -90"
] ]
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -118,24 +118,24 @@ GET /_search
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool": {
"must" : { "must": {
"match_all" : {} "match_all": {}
}, },
"filter" : { "filter": {
"geo_polygon" : { "geo_polygon": {
"person.location" : { "person.location": {
"points" : [ "points": [
"drn5x1g8cu2y", "drn5x1g8cu2y",
"30, -80", "30, -80",
"20, -90" "20, -90"
] ]
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -31,22 +31,22 @@ Given the following index with locations as `geo_shape` fields:
--------------------------------------------------
PUT /example
{
  "mappings": {
    "properties": {
      "location": {
        "type": "geo_shape"
      }
    }
  }
}
POST /example/_doc?refresh
{
  "name": "Wind & Wetter, Berlin, Germany",
  "location": {
    "type": "point",
    "coordinates": [ 13.400544, 52.530286 ]
  }
}
--------------------------------------------------
// TESTSETUP
@ -59,24 +59,24 @@ extension:
--------------------------------------------------
GET /example/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_shape": {
          "location": {
            "shape": {
              "type": "envelope",
              "coordinates": [ [ 13.0, 53.0 ], [ 14.0, 52.0 ] ]
            },
            "relation": "within"
          }
        }
      }
    }
  }
}
--------------------------------------------------
@ -87,19 +87,19 @@ The above query can, similarly, be queried on `geo_point` fields.
--------------------------------------------------
PUT /example_points
{
  "mappings": {
    "properties": {
      "location": {
        "type": "geo_point"
      }
    }
  }
}
PUT /example_points/_doc/1?refresh
{
  "name": "Wind & Wetter, Berlin, Germany",
  "location": [13.400544, 52.530286]
}
--------------------------------------------------
// TEST[continued]
@ -112,24 +112,24 @@ returned.
--------------------------------------------------
GET /example_points/_search
{
  "query": {
    "bool": {
      "must": {
        "match_all": {}
      },
      "filter": {
        "geo_shape": {
          "location": {
            "shape": {
              "type": "envelope",
              "coordinates": [ [ 13.0, 53.0 ], [ 14.0, 52.0 ] ]
            },
            "relation": "intersects"
          }
        }
      }
    }
  }
}
--------------------------------------------------
// TEST[continued]
@ -191,40 +191,40 @@ shape:
--------------------------------------------------
PUT /shapes
{
  "mappings": {
    "properties": {
      "location": {
        "type": "geo_shape"
      }
    }
  }
}
PUT /shapes/_doc/deu
{
  "location": {
    "type": "envelope",
    "coordinates" : [[13.0, 53.0], [14.0, 52.0]]
  }
}
GET /example/_search
{
  "query": {
    "bool": {
      "filter": {
        "geo_shape": {
          "location": {
            "indexed_shape": {
              "index": "shapes",
              "id": "deu",
              "path": "location"
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------
View File
@ -31,16 +31,16 @@ field mapping. For example:
----
PUT /my_index
{
  "mappings": {
    "properties": {
      "my-join-field": {
        "type": "join",
        "relations": {
          "parent": "child"
        }
      }
    }
  }
}
----
@ -53,17 +53,17 @@ PUT /my_index
----
GET /_search
{
  "query": {
    "has_child": {
      "type": "child",
      "query": {
        "match_all": {}
      },
      "max_children": 10,
      "min_children": 2,
      "score_mode": "min"
    }
  }
}
----
@ -141,18 +141,18 @@ sorts returned documents by the `click_count` field of their child documents.
----
GET /_search
{
  "query": {
    "has_child": {
      "type": "child",
      "query": {
        "function_score": {
          "script_score": {
            "script": "_score * doc['click_count'].value"
          }
        }
      },
      "score_mode": "max"
    }
  }
}
----
View File
@ -27,19 +27,19 @@ field mapping. For example:
----
PUT /my-index
{
  "mappings": {
    "properties": {
      "my-join-field": {
        "type": "join",
        "relations": {
          "parent": "child"
        }
      },
      "tag": {
        "type": "keyword"
      }
    }
  }
}
----
@ -52,18 +52,18 @@ PUT /my-index
----
GET /my-index/_search
{
  "query": {
    "has_parent": {
      "parent_type": "parent",
      "query": {
        "term": {
          "tag": {
            "value": "Elasticsearch"
          }
        }
      }
    }
  }
}
----
@ -122,18 +122,18 @@ sorts returned documents by the `view_count` field of their parent documents.
----
GET /_search
{
  "query": {
    "has_parent": {
      "parent_type": "parent",
      "score": true,
      "query": {
        "function_score": {
          "script_score": {
            "script": "_score * doc['view_count'].value"
          }
        }
      }
    }
  }
}
----
View File
@ -13,11 +13,11 @@ the <<mapping-id-field,`_id`>> field.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"ids" : { "ids" : {
"values" : ["1", "4", "100"] "values" : ["1", "4", "100"]
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -23,9 +23,9 @@ The `_score` can be changed with the `boost` parameter:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_all": { "boost" : 1.2 } "match_all": { "boost" : 1.2 }
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -39,8 +39,8 @@ This is the inverse of the `match_all` query, which matches no documents.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_none": {} "match_none": {}
} }
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -13,11 +13,11 @@ is used in a `term` query. The last term is used in a `prefix` query. A
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_bool_prefix" : { "match_bool_prefix" : {
"message" : "quick brown f" "message" : "quick brown f"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -28,15 +28,15 @@ following `bool` query
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool" : {
"should": [ "should": [
{ "term": { "message": "quick" }}, { "term": { "message": "quick" }},
{ "term": { "message": "brown" }}, { "term": { "message": "brown" }},
{ "prefix": { "message": "f"}} { "prefix": { "message": "f"}}
] ]
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -59,14 +59,14 @@ configured with the `analyzer` parameter
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_bool_prefix" : { "match_bool_prefix": {
"message": { "message": {
"query": "quick brown f", "query": "quick brown f",
"analyzer": "keyword" "analyzer": "keyword"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -22,13 +22,13 @@ brown ferrets` but not `the fox is quick and brown`.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_phrase_prefix" : { "match_phrase_prefix": {
"message" : { "message": {
"query" : "quick brown f" "query": "quick brown f"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -11,11 +11,11 @@ out of the analyzed text. For example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_phrase" : { "match_phrase": {
"message" : "this is a test" "message": "this is a test"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -30,14 +30,14 @@ definition, or the default search analyzer, for example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match_phrase" : { "match_phrase": {
"message" : { "message": {
"query" : "this is a test", "query": "this is a test",
"analyzer" : "my_analyzer" "analyzer": "my_analyzer"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -18,13 +18,13 @@ including options for fuzzy matching.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match" : { "match": {
"message" : { "message": {
"query" : "this is a test" "query": "this is a test"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -150,11 +150,11 @@ parameters. For example:
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"match" : { "match": {
"message" : "this is a test" "message": "this is a test"
}
} }
}
} }
---- ----
@ -175,14 +175,14 @@ Here is an example with the `operator` parameter:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match" : { "match": {
"message" : { "message": {
"query" : "this is a test", "query": "this is a test",
"operator" : "and" "operator": "and"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -219,14 +219,14 @@ which does not support fuzzy expansion.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match" : { "match": {
"message" : { "message": {
"query" : "this is a testt", "query": "this is a testt",
"fuzziness": "AUTO" "fuzziness": "AUTO"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -241,15 +241,15 @@ change that the `zero_terms_query` option can be used, which accepts
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"match" : { "match": {
"message" : { "message": {
"query" : "to be or not to be", "query": "to be or not to be",
"operator" : "and", "operator": "and",
"zero_terms_query": "all" "zero_terms_query": "all"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -19,14 +19,14 @@ fields, limiting the number of selected terms to 12.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"more_like_this" : { "more_like_this" : {
"fields" : ["title", "description"], "fields" : ["title", "description"],
"like" : "Once upon a time", "like" : "Once upon a time",
"min_term_freq" : 1, "min_term_freq" : 1,
"max_query_terms" : 12 "max_query_terms" : 12
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -38,24 +38,24 @@ similar to the one used in the <<docs-multi-get,Multi GET API>>.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"more_like_this" : { "more_like_this": {
"fields" : ["title", "description"], "fields": [ "title", "description" ],
"like" : [ "like": [
{ {
"_index" : "imdb", "_index": "imdb",
"_id" : "1" "_id": "1"
}, },
{ {
"_index" : "imdb", "_index": "imdb",
"_id" : "2" "_id": "2"
}, },
"and potentially some more text here as well" "and potentially some more text here as well"
], ],
"min_term_freq" : 1, "min_term_freq": 1,
"max_query_terms" : 12 "max_query_terms": 12
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -67,29 +67,29 @@ present in the index, the syntax is similar to <<docs-termvectors-artificial-doc
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"more_like_this" : { "more_like_this": {
"fields" : ["name.first", "name.last"], "fields": [ "name.first", "name.last" ],
"like" : [ "like": [
{ {
"_index" : "marvel", "_index": "marvel",
"doc" : { "doc": {
"name": { "name": {
"first": "Ben", "first": "Ben",
"last": "Grimm" "last": "Grimm"
},
"_doc": "You got no idea what I'd... what I'd give to be invisible."
}
}, },
{ "_doc": "You got no idea what I'd... what I'd give to be invisible."
"_index" : "marvel", }
"_id" : "2" },
} {
], "_index": "marvel",
"min_term_freq" : 1, "_id": "2"
"max_query_terms" : 12
} }
],
"min_term_freq": 1,
"max_query_terms": 12
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -121,27 +121,27 @@ default, but there will be no speed up on analysis for these fields.
-------------------------------------------------- --------------------------------------------------
PUT /imdb PUT /imdb
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"title": { "title": {
"type": "text", "type": "text",
"term_vector": "yes" "term_vector": "yes"
}, },
"description": { "description": {
"type": "text" "type": "text"
}, },
"tags": { "tags": {
"type": "text", "type": "text",
"fields" : { "fields": {
"raw": { "raw": {
"type" : "text", "type": "text",
"analyzer": "keyword", "analyzer": "keyword",
"term_vector" : "yes" "term_vector": "yes"
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -23,13 +23,13 @@ mapping. For example:
---- ----
PUT /my_index PUT /my_index
{ {
"mappings" : { "mappings": {
"properties" : { "properties": {
"obj1" : { "obj1": {
"type" : "nested" "type": "nested"
} }
}
} }
}
} }
---- ----
@ -41,20 +41,20 @@ PUT /my_index
---- ----
GET /my_index/_search GET /my_index/_search
{ {
"query": { "query": {
"nested" : { "nested": {
"path" : "obj1", "path": "obj1",
"query" : { "query": {
"bool" : { "bool": {
"must" : [ "must": [
{ "match" : {"obj1.name" : "blue"} }, { "match": { "obj1.name": "blue" } },
{ "range" : {"obj1.count" : {"gt" : 5}} } { "range": { "obj1.count": { "gt": 5 } } }
] ]
}
},
"score_mode" : "avg"
} }
},
"score_mode": "avg"
} }
}
} }
---- ----
// TEST[continued] // TEST[continued]
@ -133,29 +133,29 @@ with nested `make` and `model` fields.
---- ----
PUT /drivers PUT /drivers
{ {
"mappings" : { "mappings": {
"properties" : { "properties": {
"driver" : { "driver": {
"type" : "nested", "type": "nested",
"properties" : { "properties": {
"last_name" : { "last_name": {
"type" : "text" "type": "text"
}, },
"vehicle" : { "vehicle": {
"type" : "nested", "type": "nested",
"properties" : { "properties": {
"make" : { "make": {
"type" : "text" "type": "text"
}, },
"model" : { "model": {
"type" : "text" "type": "text"
} }
}
}
}
} }
}
} }
}
} }
}
} }
---- ----
@ -206,24 +206,24 @@ to match documents based on the `make` and `model` fields.
---- ----
GET /drivers/_search GET /drivers/_search
{ {
"query" : { "query": {
"nested" : { "nested": {
"path" : "driver", "path": "driver",
"query" : { "query": {
"nested" : { "nested": {
"path" : "driver.vehicle", "path": "driver.vehicle",
"query" : { "query": {
"bool" : { "bool": {
"must" : [ "must": [
{ "match" : { "driver.vehicle.make" : "Powell Motors" } }, { "match": { "driver.vehicle.make": "Powell Motors" } },
{ "match" : { "driver.vehicle.model" : "Canyonero" } } { "match": { "driver.vehicle.model": "Canyonero" } }
] ]
}
}
}
} }
}
} }
}
} }
}
} }
---- ----
// TEST[continued] // TEST[continued]
View File
@ -24,16 +24,16 @@ the following example.
---- ----
PUT /my-index PUT /my-index
{ {
"mappings": { "mappings": {
"properties" : { "properties": {
"my-join-field" : { "my-join-field": {
"type" : "join", "type": "join",
"relations": { "relations": {
"my-parent": "my-child" "my-parent": "my-child"
}
}
} }
}
} }
}
} }
---- ----
View File
@ -18,16 +18,16 @@ Create an index with two fields:
-------------------------------------------------- --------------------------------------------------
PUT /my-index PUT /my-index
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"message": { "message": {
"type": "text" "type": "text"
}, },
"query": { "query": {
"type": "percolator" "type": "percolator"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -46,11 +46,11 @@ Register a query in the percolator:
-------------------------------------------------- --------------------------------------------------
PUT /my-index/_doc/1?refresh PUT /my-index/_doc/1?refresh
{ {
"query" : { "query": {
"match" : { "match": {
"message" : "bonsai tree" "message": "bonsai tree"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -61,14 +61,14 @@ Match a document to the registered percolator queries:
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"percolate" : { "percolate": {
"field" : "query", "field": "query",
"document" : { "document": {
"message" : "A new bonsai tree in the office" "message": "A new bonsai tree in the office"
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -159,18 +159,18 @@ the percolator query in a `bool` query's filter clause or in a `constant_score`
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"constant_score": { "constant_score": {
"filter": { "filter": {
"percolate" : { "percolate": {
"field" : "query", "field": "query",
"document" : { "document": {
"message" : "A new bonsai tree in the office" "message": "A new bonsai tree in the office"
} }
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -199,25 +199,25 @@ correlate with the slot in the `documents` array specified in the `percolate` qu
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"percolate" : { "percolate": {
"field" : "query", "field": "query",
"documents" : [ <1> "documents": [ <1>
{ {
"message" : "bonsai tree" "message": "bonsai tree"
}, },
{ {
"message" : "new tree" "message": "new tree"
}, },
{ {
"message" : "the office" "message": "the office"
}, },
{ {
"message" : "office tree" "message": "office tree"
}
]
} }
]
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -315,14 +315,14 @@ Percolating an existing document, using the index response as basis to build to
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"percolate" : { "percolate": {
"field": "query", "field": "query",
"index" : "my-index", "index": "my-index",
"id" : "2", "id": "2",
"version" : 1 <1> "version": 1 <1>
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -351,11 +351,11 @@ Save a query:
-------------------------------------------------- --------------------------------------------------
PUT /my-index/_doc/3?refresh PUT /my-index/_doc/3?refresh
{ {
"query" : { "query": {
"match" : { "match": {
"message" : "brown fox" "message": "brown fox"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -366,11 +366,11 @@ Save another query:
-------------------------------------------------- --------------------------------------------------
PUT /my-index/_doc/4?refresh PUT /my-index/_doc/4?refresh
{ {
"query" : { "query": {
"match" : { "match": {
"message" : "lazy dog" "message": "lazy dog"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -381,19 +381,19 @@ Execute a search request with the `percolate` query and highlighting enabled:
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"percolate" : { "percolate": {
"field": "query", "field": "query",
"document" : { "document": {
"message" : "The quick brown fox jumps over the lazy dog" "message": "The quick brown fox jumps over the lazy dog"
}
}
},
"highlight": {
"fields": {
"message": {}
} }
} }
},
"highlight": {
"fields": {
"message": {}
}
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -477,30 +477,30 @@ When percolating multiple documents at the same time like the request below then
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"percolate" : { "percolate": {
"field": "query", "field": "query",
"documents" : [ "documents": [
{ {
"message" : "bonsai tree" "message": "bonsai tree"
}, },
{ {
"message" : "new tree" "message": "new tree"
}, },
{ {
"message" : "the office" "message": "the office"
}, },
{ {
"message" : "office tree" "message": "office tree"
}
]
} }
}, ]
"highlight": {
"fields": {
"message": {}
}
} }
},
"highlight": {
"fields": {
"message": {}
}
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
@ -570,30 +570,30 @@ It is possible to specify multiple `percolate` queries in a single search reques
-------------------------------------------------- --------------------------------------------------
GET /my-index/_search GET /my-index/_search
{ {
"query" : { "query": {
"bool" : { "bool": {
"should" : [ "should": [
{ {
"percolate" : { "percolate": {
"field" : "query", "field": "query",
"document" : { "document": {
"message" : "bonsai tree" "message": "bonsai tree"
}, },
"name": "query1" <1> "name": "query1" <1>
} }
}, },
{ {
"percolate" : { "percolate": {
"field" : "query", "field": "query",
"document" : { "document": {
"message" : "tulip flower" "message": "tulip flower"
}, },
"name": "query2" <1> "name": "query2" <1>
} }
}
]
} }
]
} }
}
} }
-------------------------------------------------- --------------------------------------------------
// TEST[continued] // TEST[continued]
View File
@ -14,16 +14,16 @@ the <<mapping-id-field,`_id`>> field.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"pinned" : { "pinned": {
"ids" : ["1", "4", "100"], "ids": [ "1", "4", "100" ],
"organic" : { "organic": {
"match":{ "match": {
"description": "brown shoes" "description": "iphone"
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -16,13 +16,13 @@ that begins with `ki`.
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"prefix": { "prefix": {
"user": { "user": {
"value": "ki" "value": "ki"
} }
}
} }
}
} }
---- ----
@ -53,9 +53,9 @@ You can simplify the `prefix` query syntax by combining the `<field>` and
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"prefix" : { "user" : "ki" } "prefix" : { "user" : "ki" }
} }
} }
---- ----
View File
@ -42,12 +42,12 @@ whitespace as an operator, `new york city` is passed as-is to the analyzer.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string" : { "query_string": {
"query" : "(new york city) OR (big apple)", "query": "(new york city) OR (big apple)",
"default_field" : "content" "default_field": "content"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -265,12 +265,12 @@ For example, the following query
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string" : { "query_string": {
"fields" : ["content", "name"], "fields": [ "content", "name" ],
"query" : "this AND that" "query": "this AND that"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -281,11 +281,11 @@ matches the same words as
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string": { "query_string": {
"query": "(content:this OR name:this) AND (content:that OR name:that)" "query": "(content:this OR name:this) AND (content:that OR name:that)"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -297,13 +297,13 @@ For example (the `name` is boosted by 5 using `^5` notation):
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string" : { "query_string" : {
"fields" : ["content", "name^5"], "fields" : ["content", "name^5"],
"query" : "this AND that OR thus", "query" : "this AND that OR thus",
"tie_breaker" : 0 "tie_breaker" : 0
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -316,12 +316,12 @@ search on all "city" fields:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string" : { "query_string" : {
"fields" : ["city.*"], "fields" : ["city.*"],
"query" : "this AND that OR thus" "query" : "this AND that OR thus"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -333,11 +333,11 @@ string itself (properly escaping the `*` sign), for example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string" : { "query_string" : {
"query" : "city.\\*:(this AND that OR thus)" "query" : "city.\\*:(this AND that OR thus)"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -352,12 +352,12 @@ introduced fields included). For example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string" : { "query_string" : {
"fields" : ["content", "name.*^5"], "fields" : ["content", "name.*^5"],
"query" : "this AND that OR thus" "query" : "this AND that OR thus"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -446,15 +446,15 @@ many "should" clauses in the resulting query should match.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string": { "query_string": {
"fields": [ "fields": [
"title" "title"
], ],
"query": "this that thus", "query": "this that thus",
"minimum_should_match": 2 "minimum_should_match": 2
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -472,16 +472,16 @@ in the single field `title`.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string": { "query_string": {
"fields": [ "fields": [
"title", "title",
"content" "content"
], ],
"query": "this that thus", "query": "this that thus",
"minimum_should_match": 2 "minimum_should_match": 2
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -496,16 +496,16 @@ that matches documents with the disjunction max over the fields `title` and
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string": { "query_string": {
"fields": [ "fields": [
"title", "title",
"content" "content"
], ],
"query": "this OR that OR thus", "query": "this OR that OR thus",
"minimum_should_match": 2 "minimum_should_match": 2
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -528,17 +528,17 @@ analyzer are grouped together when the input is analyzed.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"query_string": { "query_string": {
"fields": [ "fields": [
"title", "title",
"content" "content"
], ],
"query": "this OR that OR thus", "query": "this OR that OR thus",
"type": "cross_fields", "type": "cross_fields",
"minimum_should_match": 2 "minimum_should_match": 2
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -16,15 +16,15 @@ between `10` and `20`.
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"range" : { "range": {
"age" : { "age": {
"gte" : 10, "gte": 10,
"lte" : 20, "lte": 20,
"boost" : 2.0 "boost": 2.0
} }
}
} }
}
} }
---- ----
@ -157,14 +157,14 @@ contains a date between today and yesterday.
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"range" : { "range": {
"timestamp" : { "timestamp": {
"gte" : "now-1d/d", "gte": "now-1d/d",
"lt" : "now/d" "lt": "now/d"
} }
}
} }
}
} }
---- ----
View File
@ -23,16 +23,16 @@ terms can include `ky`, `kay`, and `kimchy`.
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"regexp": { "regexp": {
"user": { "user": {
"value": "k.*y", "value": "k.*y",
"flags" : "ALL", "flags": "ALL",
"max_determinized_states": 10000, "max_determinized_states": 10000,
"rewrite": "constant_score" "rewrite": "constant_score"
} }
}
} }
}
} }
---- ----
View File
@ -18,18 +18,18 @@ WARNING: Using scripts can result in slower search speeds. See
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool": {
"filter" : { "filter": {
"script" : { "script": {
"script" : { "script": {
"source": "doc['num1'].value > 1", "source": "doc['num1'].value > 1",
"lang": "painless" "lang": "painless"
} }
}
}
} }
}
} }
}
} }
---- ----
@ -55,21 +55,21 @@ in the script's `params` parameter. For example:
---- ----
GET /_search GET /_search
{ {
"query": { "query": {
"bool" : { "bool": {
"filter" : { "filter": {
"script" : { "script": {
"script" : { "script": {
"source" : "doc['num1'].value > params.param1", "source": "doc['num1'].value > params.param1",
"lang" : "painless", "lang": "painless",
"params" : { "params": {
"param1" : 5 "param1": 5
}
}
}
} }
}
} }
}
} }
}
} }
---- ----
View File
@ -18,16 +18,16 @@ The following `script_score` query assigns each returned document a score equal
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query" : { "query": {
"script_score" : { "script_score": {
"query" : { "query": {
"match": { "message": "elasticsearch" } "match": { "message": "elasticsearch" }
}, },
"script" : { "script": {
"source" : "doc['likes'].value / 10 " "source": "doc['likes'].value / 10 "
} }
} }
} }
} }
-------------------------------------------------- --------------------------------------------------
@ -345,23 +345,23 @@ Using an <<search-explain, explain request>> provides an explanation of how the
-------------------------------------------------- --------------------------------------------------
GET /twitter/_explain/0 GET /twitter/_explain/0
{ {
"query" : { "query": {
"script_score" : { "script_score": {
"query" : { "query": {
"match": { "message": "elasticsearch" } "match": { "message": "elasticsearch" }
}, },
"script" : { "script": {
"source" : """ "source": """
long likes = doc['likes'].value; long likes = doc['likes'].value;
double normalizedLikes = likes / 10; double normalizedLikes = likes / 10;
if (explanation != null) { if (explanation != null) {
explanation.set('normalized likes = likes / 10 = ' + likes + ' / 10 = ' + normalizedLikes); explanation.set('normalized likes = likes / 10 = ' + likes + ' / 10 = ' + normalizedLikes);
} }
return normalizedLikes; return normalizedLikes;
""" """
} }
} }
} }
} }
-------------------------------------------------- --------------------------------------------------
// TEST[setup:twitter] // TEST[setup:twitter]
View File
@ -28,22 +28,22 @@ Given the following index:
-------------------------------------------------- --------------------------------------------------
PUT /example PUT /example
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"geometry": { "geometry": {
"type": "shape" "type": "shape"
} }
}
} }
}
} }
PUT /example/_doc/1?refresh=wait_for PUT /example/_doc/1?refresh=wait_for
{ {
"name": "Lucky Landing", "name": "Lucky Landing",
"geometry": { "geometry": {
"type": "point", "type": "point",
"coordinates": [1355.400544, 5255.530286] "coordinates": [ 1355.400544, 5255.530286 ]
} }
} }
-------------------------------------------------- --------------------------------------------------
// TESTSETUP // TESTSETUP
@ -55,17 +55,17 @@ The following query will find the point using the Elasticsearch's
-------------------------------------------------- --------------------------------------------------
GET /example/_search GET /example/_search
{ {
"query":{ "query": {
"shape": {
"geometry": {
"shape": { "shape": {
"geometry": { "type": "envelope",
"shape": { "coordinates": [ [ 1355.0, 5355.0 ], [ 1400.0, 5200.0 ] ]
"type": "envelope", },
"coordinates" : [[1355.0, 5355.0], [1400.0, 5200.0]] "relation": "within"
}, }
"relation": "within"
}
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -134,36 +134,36 @@ shape:
-------------------------------------------------- --------------------------------------------------
PUT /shapes PUT /shapes
{ {
"mappings": { "mappings": {
"properties": { "properties": {
"geometry": { "geometry": {
"type": "shape" "type": "shape"
} }
}
} }
}
} }
PUT /shapes/_doc/footprint PUT /shapes/_doc/footprint
{ {
"geometry": { "geometry": {
"type": "envelope", "type": "envelope",
"coordinates" : [[1355.0, 5355.0], [1400.0, 5200.0]] "coordinates": [ [ 1355.0, 5355.0 ], [ 1400.0, 5200.0 ] ]
} }
} }
GET /example/_search GET /example/_search
{ {
"query": { "query": {
"shape": { "shape": {
"geometry": { "geometry": {
"indexed_shape": { "indexed_shape": {
"index": "shapes", "index": "shapes",
"id": "footprint", "id": "footprint",
"path": "geometry" "path": "geometry"
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -156,12 +156,12 @@ value. For example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"simple_query_string" : { "simple_query_string": {
"fields" : ["content"], "fields": [ "content" ],
"query" : "foo bar -baz" "query": "foo bar -baz"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -184,12 +184,12 @@ and `PREFIX`.
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"simple_query_string" : { "simple_query_string": {
"query" : "foo | bar + baz*", "query": "foo | bar + baz*",
"flags" : "OR|AND|PREFIX" "flags": "OR|AND|PREFIX"
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -11,23 +11,23 @@ query maps to Lucene `SpanContainingQuery`. Here is an example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"span_containing" : { "span_containing": {
"little" : { "little": {
"span_term" : { "field1" : "foo" } "span_term": { "field1": "foo" }
}, },
"big" : { "big": {
"span_near" : { "span_near": {
"clauses" : [ "clauses": [
{ "span_term" : { "field1" : "bar" } }, { "span_term": { "field1": "bar" } },
{ "span_term" : { "field1" : "baz" } } { "span_term": { "field1": "baz" } }
], ],
"slop" : 5, "slop": 5,
"in_order" : true "in_order": true
}
}
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -11,14 +11,14 @@ to Lucene `SpanFirstQuery`. Here is an example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"span_first" : { "span_first": {
"match" : { "match": {
"span_term" : { "user" : "kimchy" } "span_term": { "user": "kimchy" }
}, },
"end" : 3 "end": 3
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
View File
@ -12,13 +12,13 @@ it can be nested. Example:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"span_multi":{ "span_multi": {
"match":{ "match": {
"prefix" : { "user" : { "value" : "ki" } } "prefix": { "user": { "value": "ki" } }
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
@ -28,13 +28,13 @@ A boost can also be associated with the query:
-------------------------------------------------- --------------------------------------------------
GET /_search GET /_search
{ {
"query": { "query": {
"span_multi":{ "span_multi": {
"match":{ "match": {
"prefix" : { "user" : { "value" : "ki", "boost" : 1.08 } } "prefix": { "user": { "value": "ki", "boost": 1.08 } }
} }
}
} }
}
} }
-------------------------------------------------- --------------------------------------------------
Some files were not shown because too many files have changed in this diff