Convert rest of query-dsl docs to be run in tests

Isabel Drost-Fromm 2016-05-11 14:37:19 +02:00
parent ab4367c07e
commit 85f1ab44d9
25 changed files with 814 additions and 517 deletions

View File

@ -8,19 +8,23 @@ overall score.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "boosting" : {
            "positive" : {
                "term" : {
                    "field1" : "value1"
                }
            },
            "negative" : {
                "term" : {
                    "field2" : "value2"
                }
            },
            "negative_boost" : 0.2
        }
    }
}
--------------------------------------------------
// CONSOLE

View File

@ -70,15 +70,19 @@ In this example, words that have a document frequency greater than 0.1%
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "common": {
            "body": {
                "query": "this is bonsai cool",
                "cutoff_frequency": 0.001
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
The number of terms which should match can be controlled with the
<<query-dsl-minimum-should-match,`minimum_should_match`>>
@ -90,36 +94,44 @@ all terms required:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "common": {
            "body": {
                "query": "nelly the elephant as a cartoon",
                "cutoff_frequency": 0.001,
                "low_freq_operator": "and"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
which is roughly equivalent to:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool": {
            "must": [
                { "term": { "body": "nelly"}},
                { "term": { "body": "elephant"}},
                { "term": { "body": "cartoon"}}
            ],
            "should": [
                { "term": { "body": "the"}},
                { "term": { "body": "as"}},
                { "term": { "body": "a"}}
            ]
        }
    }
}
--------------------------------------------------
// CONSOLE
Alternatively use
<<query-dsl-minimum-should-match,`minimum_should_match`>>
@ -128,41 +140,49 @@ must be present, for instance:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "common": {
            "body": {
                "query": "nelly the elephant as a cartoon",
                "cutoff_frequency": 0.001,
                "minimum_should_match": 2
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
which is roughly equivalent to:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool": {
            "must": {
                "bool": {
                    "should": [
                        { "term": { "body": "nelly"}},
                        { "term": { "body": "elephant"}},
                        { "term": { "body": "cartoon"}}
                    ],
                    "minimum_should_match": 2
                }
            },
            "should": [
                { "term": { "body": "the"}},
                { "term": { "body": "as"}},
                { "term": { "body": "a"}}
            ]
        }
    }
}
--------------------------------------------------
// CONSOLE
minimum_should_match
@ -174,50 +194,58 @@ additional parameters (note the change in structure):
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "common": {
            "body": {
                "query": "nelly the elephant not as a cartoon",
                "cutoff_frequency": 0.001,
                "minimum_should_match": {
                    "low_freq" : 2,
                    "high_freq" : 3
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
which is roughly equivalent to:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool": {
            "must": {
                "bool": {
                    "should": [
                        { "term": { "body": "nelly"}},
                        { "term": { "body": "elephant"}},
                        { "term": { "body": "cartoon"}}
                    ],
                    "minimum_should_match": 2
                }
            },
            "should": {
                "bool": {
                    "should": [
                        { "term": { "body": "the"}},
                        { "term": { "body": "not"}},
                        { "term": { "body": "as"}},
                        { "term": { "body": "a"}}
                    ],
                    "minimum_should_match": 3
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
In this case it means the high frequency terms have only an impact on
relevance when there are at least three of them. But the most
@ -227,36 +255,44 @@ for high frequency terms is when there are only high frequency terms:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "common": {
            "body": {
                "query": "how not to be",
                "cutoff_frequency": 0.001,
                "minimum_should_match": {
                    "low_freq" : 2,
                    "high_freq" : 3
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
which is roughly equivalent to:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool": {
            "should": [
                { "term": { "body": "how"}},
                { "term": { "body": "not"}},
                { "term": { "body": "to"}},
                { "term": { "body": "be"}}
            ],
            "minimum_should_match": "3<50%"
        }
    }
}
--------------------------------------------------
// CONSOLE
The high frequency generated query is then slightly less restrictive
than with an `AND`.
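For example, a sketch that also makes the high frequency terms required via the `high_freq_operator` parameter (the field and values simply mirror the earlier examples):

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "common": {
            "body": {
                "query": "nelly the elephant as a cartoon",
                "cutoff_frequency": 0.001,
                "low_freq_operator": "and",
                "high_freq_operator": "and"
            }
        }
    }
}
--------------------------------------------------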

View File

@ -27,18 +27,22 @@ This query maps to Lucene `DisjunctionMaxQuery`.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "dis_max" : {
            "tie_breaker" : 0.7,
            "boost" : 1.2,
            "queries" : [
                {
                    "term" : { "age" : 34 }
                },
                {
                    "term" : { "age" : 35 }
                }
            ]
        }
    }
}
--------------------------------------------------
// CONSOLE

View File

@ -5,10 +5,14 @@ Returns documents that have at least one non-`null` value in the original field:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "exists" : { "field" : "user" }
    }
}
--------------------------------------------------
// CONSOLE
For instance, these documents would all match the above query:
@ -77,14 +81,20 @@ clause as follows:
[source,js]
--------------------------------------------------
"bool": {
"must_not": {
"exists": {
"field": "user"
GET /_search
{
"query": {
"bool": {
"must_not": {
"exists": {
"field": "user"
}
}
}
}
}
--------------------------------------------------
// CONSOLE
This query returns documents that have no value in the user field.

View File

@ -17,27 +17,35 @@ Here is a simple example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "fuzzy" : { "user" : "ki" }
    }
}
--------------------------------------------------
// CONSOLE
Or with more advanced settings:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "fuzzy" : {
            "user" : {
                "value" : "ki",
                "boost" : 1.0,
                "fuzziness" : 2,
                "prefix_length" : 0,
                "max_expansions": 100
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
===== Parameters
@ -75,29 +83,36 @@ For example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "fuzzy" : {
            "price" : {
                "value" : 12,
                "fuzziness" : 2
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
Will result in a range query between 10 and 14. Date fields support
<<time-units,time values>>, eg:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "fuzzy" : {
            "created" : {
                "value" : "2010-02-05T12:05:07",
                "fuzziness" : "1d"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
See <<fuzziness>> for more details about accepted values.

View File

@ -6,6 +6,24 @@ bounding box. Assuming the following indexed document:
[source,js]
--------------------------------------------------
PUT /my_locations
{
    "mappings": {
        "location": {
            "properties": {
                "pin": {
                    "properties": {
                        "location": {
                            "type": "geo_point"
                        }
                    }
                }
            }
        }
    }
}
PUT /my_locations/location/1
{
"pin" : {
"location" : {
@ -15,34 +33,40 @@ bounding box. Assuming the following indexed document:
}
}
--------------------------------------------------
// CONSOLE
// TESTSETUP
Then the following simple query can be executed with a
`geo_bounding_box` filter:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top_left" : {
                            "lat" : 40.73,
                            "lon" : -74.1
                        },
                        "bottom_right" : {
                            "lat" : 40.01,
                            "lon" : -71.12
                        }
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Query Options
@ -75,21 +99,24 @@ representation of the geo point, the filter can accept it as well:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top_left" : {
                            "lat" : 40.73,
                            "lon" : -74.1
                        },
                        "bottom_right" : {
                            "lat" : 40.01,
                            "lon" : -71.12
                        }
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
===== Lat Lon As Array
@ -106,22 +134,26 @@ conform with http://geojson.org/[GeoJSON].
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top_left" : [-74.1, 40.73],
                        "bottom_right" : [-71.12, 40.01]
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
===== Lat Lon As String
@ -130,44 +162,52 @@ Format in `lat,lon`.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top_left" : "40.73, -74.1",
                        "bottom_right" : "40.01, -71.12"
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
===== Geohash
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top_left" : "dr5r9ydj2y73",
                        "bottom_right" : "drj7teegpus6"
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Vertices
@ -181,24 +221,28 @@ values separately.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top" : 40.73,
                        "left" : -74.1,
                        "bottom" : 40.01,
                        "right" : -71.12
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
@ -227,29 +271,33 @@ are not supported. Here is an example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_bounding_box" : {
                    "pin.location" : {
                        "top_left" : {
                            "lat" : 40.73,
                            "lon" : -74.1
                        },
                        "bottom_right" : {
                            "lat" : 40.10,
                            "lon" : -71.12
                        }
                    },
                    "type" : "indexed"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Ignore Unmapped

View File

@ -5,24 +5,28 @@ Filters documents that exists within a range from a specific point:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_distance_range" : {
                    "from" : "200km",
                    "to" : "400km",
                    "pin.location" : {
                        "lat" : 40,
                        "lon" : -70
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
Supports the same point location parameter and query options as the
<<query-dsl-geo-distance-query,geo_distance>>
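For comparison, a sketch of the plain `geo_distance` query whose point formats and options this range filter shares (the distance value is only illustrative):

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_distance" : {
                    "distance" : "200km",
                    "pin.location" : {
                        "lat" : 40,
                        "lon" : -70
                    }
                }
            }
        }
    }
}
--------------------------------------------------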

View File

@ -6,25 +6,26 @@ points. Here is an example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "query" : {
                "geo_polygon" : {
                    "person.location" : {
                        "points" : [
                            {"lat" : 40, "lon" : -70},
                            {"lat" : 30, "lon" : -80},
                            {"lat" : 20, "lon" : -90}
                        ]
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Query Options
@ -53,25 +54,29 @@ conform with http://geojson.org/[GeoJSON].
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_polygon" : {
                    "person.location" : {
                        "points" : [
                            [-70, 40],
                            [-80, 30],
                            [-90, 20]
                        ]
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
===== Lat Lon as String
@ -80,50 +85,58 @@ Format in `lat,lon`.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_polygon" : {
                    "person.location" : {
                        "points" : [
                            "40, -70",
                            "30, -80",
                            "20, -90"
                        ]
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
===== Geohash
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geo_polygon" : {
                    "person.location" : {
                        "points" : [
                            "drn5x1g8cu2y",
                            "30, -80",
                            "20, -90"
                        ]
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== geo_point Type

View File

@ -26,10 +26,10 @@ Given a document that looks like this:
--------------------------------------------------
{
"name": "Wind & Wetter, Berlin, Germany",
"location": {
"type": "Point",
"coordinates": [13.400544, 52.530286]
}
"location": {
"type": "Point",
"coordinates": [13.400544, 52.530286]
}
}
--------------------------------------------------
@ -38,6 +38,7 @@ The following query will find the point using the Elasticsearch's
[source,js]
--------------------------------------------------
GET /_search
{
"query":{
"bool": {
@ -59,6 +60,7 @@ The following query will find the point using the Elasticsearch's
}
}
--------------------------------------------------
// CONSOLE
==== Pre-Indexed Shape
@ -81,26 +83,30 @@ shape:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool": {
            "must": {
                "match_all": {}
            },
            "filter": {
                "geo_shape": {
                    "location": {
                        "indexed_shape": {
                            "id": "DEU",
                            "type": "countries",
                            "index": "shapes",
                            "path": "location"
                        }
                    }
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
==== Spatial Relations

View File

@ -13,6 +13,7 @@ setting the `geohash_prefix` option:
[source,js]
--------------------------------------------------
PUT /my_index
{
"mappings" : {
"location": {
@ -28,6 +29,8 @@ setting the `geohash_prefix` option:
}
}
--------------------------------------------------
// CONSOLE
// TESTSETUP
The geohash cell can be defined by all formats of `geo_points`. If such a cell is
defined by a latitude and longitude pair, the size of the cell needs to be
@ -42,24 +45,28 @@ next to the given cell.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "bool" : {
            "must" : {
                "match_all" : {}
            },
            "filter" : {
                "geohash_cell": {
                    "pin": {
                        "lat": 13.4080,
                        "lon": 52.5186
                    },
                    "precision": 3,
                    "neighbors": true
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Ignore Unmapped

View File

@ -7,17 +7,21 @@ an example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "has_child" : {
            "type" : "blog_tag",
            "query" : {
                "term" : {
                    "tag" : "something"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Scoring capabilities
@ -32,18 +36,22 @@ inside the `has_child` query:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "has_child" : {
            "type" : "blog_tag",
            "score_mode" : "min",
            "query" : {
                "term" : {
                    "tag" : "something"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Min/Max Children
@ -54,20 +62,24 @@ a match:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "has_child" : {
            "type" : "blog_tag",
            "score_mode" : "min",
            "min_children": 2, <1>
            "max_children": 10, <1>
            "query" : {
                "term" : {
                    "tag" : "something"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
<1> Both `min_children` and `max_children` are optional.
The `min_children` and `max_children` parameters can be combined with

View File

@ -9,17 +9,21 @@ in the same manner as the `has_child` query.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "has_parent" : {
            "parent_type" : "blog",
            "query" : {
                "term" : {
                    "tag" : "something"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Scoring capabilities
@ -34,18 +38,22 @@ matching parent document. The score mode can be specified with the
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "has_parent" : {
            "parent_type" : "blog",
            "score" : true,
            "query" : {
                "term" : {
                    "tag" : "something"
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[float]
==== Ignore Unmapped

View File

@ -6,13 +6,17 @@ uses the <<mapping-uid-field,_uid>> field.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "ids" : {
            "type" : "my_type",
            "values" : ["1", "4", "100"]
        }
    }
}
--------------------------------------------------
// CONSOLE
The `type` is optional and can be omitted; it can also accept an array of values.
If no type is specified, all types defined in the index mapping are tried.
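For instance, a sketch of the same lookup with the `type` omitted entirely:

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "ids" : {
            "values" : ["1", "4", "100"]
        }
    }
}
--------------------------------------------------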

View File

@ -9,18 +9,22 @@ on the list, the alternative `no_match_query` is executed.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "indices" : {
            "indices" : ["index1", "index2"],
            "query" : {
                "term" : { "tag" : "wow" }
            },
            "no_match_query" : {
                "term" : { "tag" : "kow" }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
You can use the `index` field to provide a single index.
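A sketch of the same query addressing a single index through the `index` field instead of the `indices` array:

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "indices" : {
            "index" : "index1",
            "query" : {
                "term" : { "tag" : "wow" }
            },
            "no_match_query" : {
                "term" : { "tag" : "kow" }
            }
        }
    }
}
--------------------------------------------------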

View File

@ -6,12 +6,16 @@ allows for prefix matches on the last term in the text. For example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match_phrase_prefix" : {
            "message" : "quick brown f"
        }
    }
}
--------------------------------------------------
// CONSOLE
It accepts the same parameters as the phrase type. In addition, it also
accepts a `max_expansions` parameter (default `50`) that can control to how
@ -21,15 +25,19 @@ example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match_phrase_prefix" : {
            "message" : {
                "query" : "quick brown f",
                "max_expansions" : 10
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[IMPORTANT]
===================================================

View File

@ -6,12 +6,16 @@ out of the analyzed text. For example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match_phrase" : {
            "message" : "this is a test"
        }
    }
}
--------------------------------------------------
// CONSOLE
A phrase query matches terms up to a configurable `slop`
(which defaults to 0) in any order. Transposed terms have a slop of 2.
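For example, a sketch that lets the phrase match with up to two positions of slack by setting `slop`:

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match_phrase" : {
            "message" : {
                "query" : "this is a test",
                "slop" : 2
            }
        }
    }
}
--------------------------------------------------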
@ -22,12 +26,16 @@ definition, or the default search analyzer, for example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match_phrase" : {
            "message" : {
                "query" : "this is a test",
                "analyzer" : "my_analyzer"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE

View File

@ -7,12 +7,16 @@ them, and constructs a query. For example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match" : {
            "message" : "this is a test"
        }
    }
}
--------------------------------------------------
// CONSOLE
Note that `message` is the name of a field; you can substitute the name of
any field (including `_all`) instead.
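For instance, a sketch of the same query run against the `_all` field:

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match" : {
            "_all" : "this is a test"
        }
    }
}
--------------------------------------------------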
@ -54,15 +58,19 @@ change in structure, `message` is the field name):
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match" : {
            "message" : {
                "query" : "this is a test",
                "operator" : "and"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[[query-dsl-match-query-zero]]
===== Zero terms query
@ -73,16 +81,20 @@ change that the `zero_terms_query` option can be used, which accepts
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match" : {
            "message" : {
                "query" : "to be or not to be",
                "operator" : "and",
                "zero_terms_query": "all"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
[[query-dsl-match-query-cutoff]]
===== Cutoff frequency
@ -110,16 +122,19 @@ Here is an example showing a query composed of stopwords exclusively:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "match" : {
            "message" : {
                "query" : "to be or not to be",
                "cutoff_frequency" : 0.001
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
IMPORTANT: The `cutoff_frequency` option operates on a per-shard-level. This means
that when trying it out on test indexes with low document numbers you

View File

@ -10,6 +10,7 @@ will work with:
[source,js]
--------------------------------------------------
PUT /my_index
{
"type1" : {
"properties" : {
@ -20,30 +21,36 @@ will work with:
}
}
--------------------------------------------------
// CONSOLE
// TESTSETUP
And here is a sample nested query usage:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "nested" : {
            "path" : "obj1",
            "score_mode" : "avg",
            "query" : {
                "bool" : {
                    "must" : [
                        {
                            "match" : {"obj1.name" : "blue"}
                        },
                        {
                            "range" : {"obj1.count" : {"gt" : 5}}
                        }
                    ]
                }
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
The query `path` points to the nested object path, and the `query`
includes the query that will run on the nested docs matching the

View File

@ -7,6 +7,7 @@ The `parent_id` query can be used to find child documents which belong to a part
[source,js]
--------------------------------------------------
PUT /my_index
{
"parent_id" : {
"type" : "blog_tag",
@ -14,6 +15,8 @@ The `parent_id` query can be used to find child documents which belong to a part
}
}
--------------------------------------------------
// CONSOLE
// TESTSETUP
The above is functionally equivalent to using the following
<<query-dsl-has-parent-query, `has_parent`>> query, but performs
@ -21,17 +24,21 @@ better as it does not need to do a join:
[source,js]
--------------------------------------------------
GET /_search
{
"has_parent": {
"type": "blog",
"query": {
"term": {
"_id": "1"
}
"has_parent": {
"type": "blog",
"query": {
"term": {
"_id": "1"
}
}
}
}
}
}
--------------------------------------------------
// CONSOLE
==== Parameters

View File

@ -13,26 +13,27 @@ Create an index with two mappings:
[source,js]
--------------------------------------------------
curl -XPUT "http://localhost:9200/my-index" -d'
PUT /my-index
{
"mappings": {
"doctype": {
"properties": {
"message": {
"type": "string"
}
}
},
"queries": {
"properties": {
"query": {
"type": "percolator"
}
}
"mappings": {
"doctype": {
"properties": {
"message": {
"type": "string"
}
}
},
"queries": {
"properties": {
"query": {
"type": "percolator"
}
}
}
}
}
}'
}
--------------------------------------------------
// CONSOLE
The `doctype` mapping is the mapping used to preprocess
the document defined in the `percolator` query before it
@ -50,20 +51,24 @@ Register a query in the percolator:
[source,js]
--------------------------------------------------
PUT /my-index/queries/1
{
    "query" : {
        "match" : {
            "message" : "bonsai tree"
        }
    }
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
Match a document to the registered percolator queries:
[source,js]
--------------------------------------------------
GET /my-index/_search
{
"query" : {
"percolate" : {
"field" : "query",
@ -73,8 +78,10 @@ curl -XGET 'localhost:9200/my-index/_search' -d '{
}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
The above request will yield the following response:
@ -151,12 +158,13 @@ Index the document we want to percolate:
[source,js]
--------------------------------------------------
curl -XPUT "http://localhost:9200/my-index/message/1" -d'
PUT /my-index/message/1
{
"message" : "A new bonsai tree in the office"
}'
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
Index response:
[source,js]
@ -179,7 +187,7 @@ Percolating an existing document, using the index response as basis to build to
[source,js]
--------------------------------------------------
curl -XGET "http://localhost:9200/my-index/_search" -d'
GET /my-index/_search
{
"query" : {
"percolate" : {
@ -191,8 +199,10 @@ curl -XGET "http://localhost:9200/my-index/_search" -d'
"version" : 1 <1>
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
<1> The version is optional, but useful in certain cases. We can then ensure that we are trying to percolate
the document we have just indexed. A change may be made after we have indexed, and if that is the
@ -216,35 +226,39 @@ Save a query:
[source,js]
--------------------------------------------------
curl -XPUT "http://localhost:9200/my-index/queries/1" -d'
PUT /my-index/queries/1
{
"query" : {
"match" : {
"message" : "brown fox"
}
}
}'
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
Save another query:
[source,js]
--------------------------------------------------
curl -XPUT "http://localhost:9200/my-index/queries/2" -d'
PUT /my-index/queries/2
{
"query" : {
"match" : {
"message" : "lazy dog"
}
}
}'
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
Execute a search request with the `percolate` query and highlighting enabled:
[source,js]
--------------------------------------------------
curl -XGET "http://localhost:9200/my-index/_search" -d'
GET /my-index/_search
{
"query" : {
"percolate" : {
@ -260,8 +274,10 @@ curl -XGET "http://localhost:9200/my-index/_search" -d'
"message": {}
}
}
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
This will yield the following response.

View File

@ -6,13 +6,17 @@ an example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string" : {
            "default_field" : "content",
            "query" : "this AND that OR thus"
        }
    }
}
--------------------------------------------------
// CONSOLE
The `query_string` top level parameters include:
@ -113,25 +117,33 @@ For example, the following query
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string" : {
            "fields" : ["content", "name"],
            "query" : "this AND that"
        }
    }
}
--------------------------------------------------
// CONSOLE
matches the same words as
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string": {
            "query": "(content:this OR name:this) AND (content:that OR name:that)"
        }
    }
}
--------------------------------------------------
// CONSOLE
Since several queries are generated from the individual search terms,
combining them can be automatically done using either a `dis_max` query or a
@ -140,14 +152,18 @@ notation):
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string" : {
            "fields" : ["content", "name^5"],
            "query" : "this AND that OR thus",
            "use_dis_max" : true
        }
    }
}
--------------------------------------------------
// CONSOLE
Simple wildcard can also be used to search "within" specific inner
elements of the document. For example, if we have a `city` object with
@ -156,14 +172,18 @@ search on all "city" fields:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string" : {
            "fields" : ["city.*"],
            "query" : "this AND that OR thus",
            "use_dis_max" : true
        }
    }
}
--------------------------------------------------
// CONSOLE
Another option is to provide the wildcard fields search in the query
string itself (properly escaping the `*` sign), for example:
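A sketch of such a query, with the field wildcard escaped inside the query string itself (the exact escaping shown is an assumption):

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string" : {
            "query" : "city.\\*:(this AND that OR thus)",
            "use_dis_max" : true
        }
    }
}
--------------------------------------------------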
@ -188,13 +208,17 @@ introduced fields included). For example:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "query_string" : {
            "fields" : ["content", "name.*^5"],
            "query" : "this AND that OR thus",
            "use_dis_max" : true
        }
    }
}
--------------------------------------------------
// CONSOLE
include::query-string-syntax.asciidoc[]

View File

@ -15,40 +15,52 @@ matchers like `.*?+` will mostly lower performance.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "regexp":{
            "name.first": "s.*y"
        }
    }
}
--------------------------------------------------
// CONSOLE
Boosting is also supported
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "regexp":{
            "name.first":{
                "value":"s.*y",
                "boost":1.2
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
You can also use special flags
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "regexp":{
            "name.first": {
                "value": "s.*y",
                "flags" : "INTERSECTION|COMPLEMENT|EMPTY"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
Possible flags are `ALL` (default), `ANYSTRING`, `COMPLEMENT`,
`EMPTY`, `INTERSECTION`, `INTERVAL`, or `NONE`. Please check the
@ -64,16 +76,19 @@ this limit to allow more complex regular expressions to execute.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "regexp":{
            "name.first": {
                "value": "s.*y",
                "flags" : "INTERSECTION|COMPLEMENT|EMPTY",
                "max_determinized_states": 20000
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
include::regexp-syntax.asciidoc[]

View File

@ -7,17 +7,20 @@ context, for example:
[source,js]
----------------------------------------------
"bool" : {
"must" : {
...
},
"filter" : {
"script" : {
"script" : "doc['num1'].value > 1"
GET /_search
{
"query": {
"bool" : {
"must" : {
"script" : {
"script" : "doc['num1'].value > 1"
}
}
}
}
}
----------------------------------------------
// CONSOLE
[float]
==== Custom Parameters
@ -28,20 +31,23 @@ to use the ability to pass parameters to the script itself, for example:
[source,js]
----------------------------------------------
"bool" : {
"must" : {
...
},
"filter" : {
"script" : {
"script" : {
"inline" : "doc['num1'].value > param1"
"params" : {
"param1" : 5
GET /_search
{
"query": {
"bool" : {
"must" : {
"script" : {
"script" : {
"inline" : "doc['num1'].value > param1",
"params" : {
"param1" : 5
}
}
}
}
}
}
}
----------------------------------------------
// CONSOLE

View File

@ -67,33 +67,37 @@ possible, reducing the need for networking.
[float]
===== Terms lookup twitter example
First we index the information for user with id 2, specifically its
followers, then index a tweet from user with id 1. Finally we search on
all the tweets that match the followers of user 2.
[source,js]
--------------------------------------------------
PUT /users/user/2
{
    "followers" : ["1", "3"]
}

PUT /tweets/tweet/1
{
    "user" : "1"
}

GET /tweets/_search
{
    "query" : {
        "terms" : {
            "user" : {
                "index" : "users",
                "type" : "user",
                "id" : "2",
                "path" : "followers"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE
The structure of the external terms document can also include an array of
inner objects.
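A sketch of such a lookup document, assuming each follower is stored as an object with an `id` field; the terms lookup `path` would then point at `followers.id`:

[source,js]
--------------------------------------------------
PUT /users/user/2
{
    "followers" : [
        {
            "id" : "1"
        },
        {
            "id" : "2"
        }
    ]
}
--------------------------------------------------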

View File

@ -11,28 +11,40 @@ query maps to Lucene `WildcardQuery`.
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "wildcard" : { "user" : "ki*y" }
    }
}
--------------------------------------------------
// CONSOLE
A boost can also be associated with the query:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "wildcard" : { "user" : { "value" : "ki*y", "boost" : 2.0 } }
    }
}
--------------------------------------------------
// CONSOLE
Or:
[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "wildcard" : { "user" : { "wildcard" : "ki*y", "boost" : 2.0 } }
    }
}
--------------------------------------------------
// CONSOLE
This multi term query allows you to control how it gets rewritten using the
<<query-dsl-multi-term-rewrite,rewrite>> parameter.
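For example, a sketch that picks the rewrite method explicitly (the particular method chosen here is only illustrative):

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "wildcard" : {
            "user" : {
                "wildcard" : "ki*y",
                "rewrite" : "constant_score"
            }
        }
    }
}
--------------------------------------------------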