Merge branch 'master' into pr/fieldtype-mapper-split
commit b625ddd17f
|
@ -33,6 +33,7 @@ my @Groups = qw(
|
|||
);
|
||||
my %Group_Labels = (
|
||||
breaking => 'Breaking changes',
|
||||
build => 'Build',
|
||||
deprecation => 'Deprecations',
|
||||
doc => 'Docs',
|
||||
feature => 'New features',
|
||||
|
@ -70,6 +71,14 @@ sub dump_issues {
|
|||
$month++;
|
||||
$year += 1900;
|
||||
|
||||
print <<"HTML";
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
</head>
|
||||
<body>
|
||||
HTML
|
||||
|
||||
for my $group ( @Groups, 'other' ) {
|
||||
my $group_issues = $issues->{$group} or next;
|
||||
print "<h2>$Group_Labels{$group}</h2>\n\n<ul>\n";
|
||||
|
@ -115,6 +124,7 @@ sub dump_issues {
|
|||
print "</ul>";
|
||||
print "\n\n";
|
||||
}
|
||||
print "</body></html>\n";
|
||||
}
|
||||
|
||||
#===================================
|
||||
|
|
|
@ -30,10 +30,10 @@ MetricsAggregationBuilder aggregation =
|
|||
AggregationBuilders
|
||||
.scriptedMetric("agg")
|
||||
.initScript("_agg['heights'] = []")
|
||||
.mapScript("if (doc['gender'].value == \"male\") " +
|
||||
.mapScript(new Script("if (doc['gender'].value == \"male\") " +
|
||||
"{ _agg.heights.add(doc['height'].value) } " +
|
||||
"else " +
|
||||
"{ _agg.heights.add(-1 * doc['height'].value) }");
|
||||
"{ _agg.heights.add(-1 * doc['height'].value) }"));
|
||||
--------------------------------------------------
|
||||
|
||||
You can also specify a `combine` script which will be executed on each shard:
|
||||
|
@ -43,12 +43,12 @@ You can also specify a `combine` script which will be executed on each shard:
|
|||
MetricsAggregationBuilder aggregation =
|
||||
AggregationBuilders
|
||||
.scriptedMetric("agg")
|
||||
.initScript("_agg['heights'] = []")
|
||||
.mapScript("if (doc['gender'].value == \"male\") " +
|
||||
.initScript(new Script("_agg['heights'] = []"))
|
||||
.mapScript(new Script("if (doc['gender'].value == \"male\") " +
|
||||
"{ _agg.heights.add(doc['height'].value) } " +
|
||||
"else " +
|
||||
"{ _agg.heights.add(-1 * doc['height'].value) }")
|
||||
.combineScript("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum");
|
||||
"{ _agg.heights.add(-1 * doc['height'].value) }"))
|
||||
.combineScript(new Script("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum"));
|
||||
--------------------------------------------------
|
||||
|
||||
You can also specify a `reduce` script which will be executed on the node which gets the request:
|
||||
|
@ -58,13 +58,13 @@ You can also specify a `reduce` script which will be executed on the node which
|
|||
MetricsAggregationBuilder aggregation =
|
||||
AggregationBuilders
|
||||
.scriptedMetric("agg")
|
||||
.initScript("_agg['heights'] = []")
|
||||
.mapScript("if (doc['gender'].value == \"male\") " +
|
||||
.initScript(new Script("_agg['heights'] = []"))
|
||||
.mapScript(new Script("if (doc['gender'].value == \"male\") " +
|
||||
"{ _agg.heights.add(doc['height'].value) } " +
|
||||
"else " +
|
||||
"{ _agg.heights.add(-1 * doc['height'].value) }")
|
||||
.combineScript("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum")
|
||||
.reduceScript("heights_sum = 0; for (a in _aggs) { heights_sum += a }; return heights_sum");
|
||||
"{ _agg.heights.add(-1 * doc['height'].value) }"))
|
||||
.combineScript(new Script("heights_sum = 0; for (t in _agg.heights) { heights_sum += t }; return heights_sum"))
|
||||
.reduceScript(new Script("heights_sum = 0; for (a in _aggs) { heights_sum += a }; return heights_sum"));
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ Or you can use `prepareUpdate()` method:
|
|||
[source,java]
|
||||
--------------------------------------------------
|
||||
client.prepareUpdate("ttl", "doc", "1")
|
||||
.setScript("ctx._source.gender = \"male\"" <1> , ScriptService.ScriptType.INLINE)
|
||||
.setScript(new Script("ctx._source.gender = \"male\"" <1> , ScriptService.ScriptType.INLINE, null, null))
|
||||
.get();
|
||||
|
||||
client.prepareUpdate("ttl", "doc", "1")
|
||||
|
@ -46,7 +46,7 @@ The update API allows to update a document based on a script provided:
|
|||
[source,java]
|
||||
--------------------------------------------------
|
||||
UpdateRequest updateRequest = new UpdateRequest("ttl", "doc", "1")
|
||||
.script("ctx._source.gender = \"male\"");
|
||||
.script(new Script("ctx._source.gender = \"male\""));
|
||||
client.update(updateRequest).get();
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -73,8 +73,6 @@ Some aggregations work on values extracted from the aggregated documents. Typica
|
|||
a specific document field which is set using the `field` key for the aggregations. It is also possible to define a
|
||||
<<modules-scripting,`script`>> which will generate the values (per document).
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
|
||||
When both `field` and `script` settings are configured for the aggregation, the script will be treated as a
|
||||
`value script`. While normal scripts are evaluated on a document level (i.e. the script has access to all the data
|
||||
associated with the document), value scripts are evaluated on the *value* level. In this mode, the values are extracted
|
||||
|
|
|
@ -128,8 +128,6 @@ It is also possible to customize the key for each range:
|
|||
|
||||
==== Script
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
|
@ -148,6 +146,33 @@ TIP: The `script` parameter expects an inline script. Use `script_id` for indexe
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"price_ranges" : {
|
||||
"range" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"field": "price"
|
||||
}
|
||||
},
|
||||
"ranges" : [
|
||||
{ "to" : 50 },
|
||||
{ "from" : 50, "to" : 100 },
|
||||
{ "from" : 100 }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
==== Value Script
|
||||
|
||||
Let's say the product prices are in USD but we would like to get the price ranges in EURO. We can use a value script to convert the prices prior to the aggregation (assuming a conversion rate of 0.8).
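A minimal sketch of such a value script, written with the nested `script` syntax used elsewhere on this page (the `price` field name and the `0.8` rate are illustrative assumptions):

[source,js]
--------------------------------------------------
{
    "aggs" : {
        "price_ranges" : {
            "range" : {
                "field" : "price", <1>
                "script" : {
                    "inline": "_value * conversion_rate",
                    "params" : {
                        "conversion_rate" : 0.8
                    }
                },
                "ranges" : [
                    { "to" : 50 },
                    { "from" : 50, "to" : 100 },
                    { "from" : 100 }
                ]
            }
        }
    }
}
--------------------------------------------------
<1> The field name and conversion rate are placeholders; adapt them to your own mapping.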
|
||||
|
|
|
@ -358,13 +358,6 @@ Customized scores can be implemented via a script:
|
|||
--------------------------------------------------
|
||||
|
||||
Scripts can be inline (as in above example), indexed or stored on disk. For details on the options, see <<modules-scripting, script documentation>>.
|
||||
Parameters need to be set as follows:
|
||||
|
||||
[horizontal]
|
||||
`script`:: Inline script, name of script file or name of indexed script. Mandatory.
|
||||
`script_type`:: One of "inline" (default), "indexed" or "file".
|
||||
`lang`:: Script language (default "groovy")
|
||||
`params`:: Script parameters (default empty).
|
||||
|
||||
Available parameters in the script are
|
||||
|
||||
|
|
|
@ -441,7 +441,27 @@ Generating the terms using a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"genders" : {
|
||||
"terms" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"field": "gender"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
|
||||
==== Value Script
|
||||
|
|
|
@ -47,7 +47,29 @@ Computing the average grade based on a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...,
|
||||
|
||||
"aggs" : {
|
||||
"avg_grade" : {
|
||||
"avg" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"field": "grade"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
===== Value Script
|
||||
|
||||
|
@ -63,9 +85,11 @@ It turned out that the exam was way above the level of the students and a grade
|
|||
"avg_corrected_grade" : {
|
||||
"avg" : {
|
||||
"field" : "grade",
|
||||
"script" : "_value * correction",
|
||||
"params" : {
|
||||
"correction" : 1.2
|
||||
"script" : {
|
||||
"inline": "_value * correction",
|
||||
"params" : {
|
||||
"correction" : 1.2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -153,7 +153,28 @@ however since hashes need to be computed on the fly.
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"author_count" : {
|
||||
"cardinality" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"first_name_field": "author.first_name",
|
||||
"last_name_field": "author.last_name"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
==== Missing value
|
||||
|
||||
|
|
|
@ -91,7 +91,29 @@ Computing the grades stats based on a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...,
|
||||
|
||||
"aggs" : {
|
||||
"grades_stats" : {
|
||||
"extended_stats" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"field": "grade"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
===== Value Script
|
||||
|
||||
|
@ -107,9 +129,11 @@ It turned out that the exam was way above the level of the students and a grade
|
|||
"grades_stats" : {
|
||||
"extended_stats" : {
|
||||
"field" : "grade",
|
||||
"script" : "_value * correction",
|
||||
"params" : {
|
||||
"correction" : 1.2
|
||||
"script" : {
|
||||
"inline": "_value * correction",
|
||||
"params" : {
|
||||
"correction" : 1.2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -44,7 +44,27 @@ Computing the max price value across all document, this time using a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"max_price" : {
|
||||
"max" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"field": "price"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
==== Value Script
|
||||
|
||||
|
@ -57,9 +77,11 @@ Let's say that the prices of the documents in our index are in USD, but we would
|
|||
"max_price_in_euros" : {
|
||||
"max" : {
|
||||
"field" : "price",
|
||||
"script" : "_value * conversion_rate",
|
||||
"params" : {
|
||||
"conversion_rate" : 1.2
|
||||
"script" : {
|
||||
"inline": "_value * conversion_rate",
|
||||
"params" : {
|
||||
"conversion_rate" : 1.2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -44,7 +44,27 @@ Computing the min price value across all document, this time using a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"min_price" : {
|
||||
"min" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"field": "price"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
==== Value Script
|
||||
|
||||
|
@ -57,9 +77,11 @@ Let's say that the prices of the documents in our index are in USD, but we would
|
|||
"min_price_in_euros" : {
|
||||
"min" : {
|
||||
"field" : "price",
|
||||
"script" : "_value * conversion_rate",
|
||||
"params" : {
|
||||
"conversion_rate" : 1.2
|
||||
"script" :
|
||||
"inline": "_value * conversion_rate",
|
||||
"params" : {
|
||||
"conversion_rate" : 1.2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -100,9 +100,11 @@ a script to convert them on-the-fly:
|
|||
"aggs" : {
|
||||
"load_time_outlier" : {
|
||||
"percentiles" : {
|
||||
"script" : "doc['load_time'].value / timeUnit", <1>
|
||||
"params" : {
|
||||
"timeUnit" : 1000 <2>
|
||||
"script" : {
|
||||
"inline": "doc['load_time'].value / timeUnit", <1>
|
||||
"params" : {
|
||||
"timeUnit" : 1000 <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -113,7 +115,27 @@ a script to convert them on-the-fly:
|
|||
script to generate values which percentiles are calculated on
|
||||
<2> Scripting supports parameterized input just like any other script
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"load_time_outlier" : {
|
||||
"percentiles" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params" : {
|
||||
"timeUnit" : 1000
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
[[search-aggregations-metrics-percentile-aggregation-approximation]]
|
||||
==== Percentiles are (usually) approximate
|
||||
|
|
|
@ -72,9 +72,11 @@ a script to convert them on-the-fly:
|
|||
"load_time_outlier" : {
|
||||
"percentile_ranks" : {
|
||||
"values" : [3, 5],
|
||||
"script" : "doc['load_time'].value / timeUnit", <1>
|
||||
"params" : {
|
||||
"timeUnit" : 1000 <2>
|
||||
"script" : {
|
||||
"inline": "doc['load_time'].value / timeUnit", <1>
|
||||
"params" : {
|
||||
"timeUnit" : 1000 <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -85,7 +87,28 @@ a script to convert them on-the-fly:
|
|||
script to generate values which percentile ranks are calculated on
|
||||
<2> Scripting supports parameterized input just like any other script
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"aggs" : {
|
||||
"load_time_outlier" : {
|
||||
"percentile_ranks" : {
|
||||
"values" : [3, 5],
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params" : {
|
||||
"timeUnit" : 1000
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
==== Missing value
|
||||
|
||||
|
@ -108,3 +131,4 @@ had a value.
|
|||
--------------------------------------------------
|
||||
|
||||
<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.
|
||||
|
||||
|
|
|
@ -45,6 +45,42 @@ The response for the above aggregation:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
The above example can also be specified using file scripts as follows:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"query" : {
|
||||
"match_all" : {}
|
||||
},
|
||||
"aggs": {
|
||||
"profit": {
|
||||
"scripted_metric": {
|
||||
"init_script" : {
|
||||
"file": "my_init_script"
|
||||
},
|
||||
"map_script" : {
|
||||
"file": "my_map_script"
|
||||
},
|
||||
"combine_script" : {
|
||||
"file": "my_combine_script"
|
||||
},
|
||||
"params": {
|
||||
"field": "amount" <1>
|
||||
},
|
||||
"reduce_script" : {
|
||||
"file": "my_reduce_script"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
<1> script parameters for init, map and combine scripts must be specified in a global `params` object so that they can be shared between the scripts
|
||||
|
||||
For more details on specifying scripts see <<modules-scripting, script documentation>>.
|
||||
|
||||
==== Scope of scripts
|
||||
|
||||
The scripted metric aggregation uses scripts at 4 stages of its execution:
|
||||
|
@ -225,13 +261,4 @@ params:: Optional. An object whose contents will be passed as variable
|
|||
--------------------------------------------------
|
||||
reduce_params:: Optional. An object whose contents will be passed as variables to the `reduce_script`. This can be useful to allow the user to control
|
||||
the behavior of the reduce phase. If this is not specified the variable will be undefined in the reduce_script execution.
|
||||
lang:: Optional. The script language used for the scripts. If this is not specified the default scripting language is used.
|
||||
init_script_file:: Optional. Can be used in place of the `init_script` parameter to provide the script using in a file.
|
||||
init_script_id:: Optional. Can be used in place of the `init_script` parameter to provide the script using an indexed script.
|
||||
map_script_file:: Optional. Can be used in place of the `map_script` parameter to provide the script using in a file.
|
||||
map_script_id:: Optional. Can be used in place of the `map_script` parameter to provide the script using an indexed script.
|
||||
combine_script_file:: Optional. Can be used in place of the `combine_script` parameter to provide the script using in a file.
|
||||
combine_script_id:: Optional. Can be used in place of the `combine_script` parameter to provide the script using an indexed script.
|
||||
reduce_script_file:: Optional. Can be used in place of the `reduce_script` parameter to provide the script using in a file.
|
||||
reduce_script_id:: Optional. Can be used in place of the `reduce_script` parameter to provide the script using an indexed script.
|
||||
|
||||
|
|
|
@ -53,7 +53,29 @@ Computing the grades stats based on a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...,
|
||||
|
||||
"aggs" : {
|
||||
"grades_stats" : {
|
||||
"stats" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params" : {
|
||||
"field" : "grade"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
===== Value Script
|
||||
|
||||
|
@ -69,9 +91,11 @@ It turned out that the exam was way above the level of the students and a grade
|
|||
"grades_stats" : {
|
||||
"stats" : {
|
||||
"field" : "grade",
|
||||
"script" : "_value * correction",
|
||||
"params" : {
|
||||
"correction" : 1.2
|
||||
"script" :
|
||||
"inline": "_value * correction",
|
||||
"params" : {
|
||||
"correction" : 1.2
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -55,7 +55,29 @@ Computing the intraday return based on a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...,
|
||||
|
||||
"aggs" : {
|
||||
"intraday_return" : {
|
||||
"sum" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params" : {
|
||||
"field" : "change"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
||||
===== Value Script
|
||||
|
||||
|
@ -71,7 +93,8 @@ Computing the sum of squares over all stock tick changes:
|
|||
"daytime_return" : {
|
||||
"sum" : {
|
||||
"field" : "change",
|
||||
"script" : "_value * _value" }
|
||||
"script" : "_value * _value"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -48,4 +48,26 @@ Counting the values generated by a script:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
|
||||
This will interpret the `script` parameter as an `inline` script with the default script language and no script parameters. To use a file script use the following syntax:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...,
|
||||
|
||||
"aggs" : {
|
||||
"grades_count" : {
|
||||
"value_count" : {
|
||||
"script" : {
|
||||
"file": "my_script",
|
||||
"params" : {
|
||||
"field" : "grade"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
TIP: for indexed scripts replace the `file` parameter with an `id` parameter.
|
||||
|
|
|
@ -187,7 +187,7 @@ the options. Curl example with update actions:
|
|||
{ "update" : {"_id" : "1", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
|
||||
{ "doc" : {"field" : "value"} }
|
||||
{ "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
|
||||
{ "script" : "ctx._source.counter += param1", "lang" : "js", "params" : {"param1" : 1}, "upsert" : {"counter" : 1}}
|
||||
{ "script" : { "inline": "ctx._source.counter += param1", "lang" : "js", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}}
|
||||
{ "update" : {"_id" : "2", "_type" : "type1", "_index" : "index1", "_retry_on_conflict" : 3} }
|
||||
{ "doc" : {"field" : "value"}, "doc_as_upsert" : true }
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -28,9 +28,11 @@ Now, we can execute a script that would increment the counter:
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
|
||||
"script" : "ctx._source.counter += count",
|
||||
"params" : {
|
||||
"count" : 4
|
||||
"script" : {
|
||||
"inline": "ctx._source.counter += count",
|
||||
"params" : {
|
||||
"count" : 4
|
||||
}
|
||||
}
|
||||
}'
|
||||
--------------------------------------------------
|
||||
|
@ -41,9 +43,11 @@ will still add it, since its a list):
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
|
||||
"script" : "ctx._source.tags += tag",
|
||||
"params" : {
|
||||
"tag" : "blue"
|
||||
"script" : {
|
||||
"inline": "ctx._source.tags += tag",
|
||||
"params" : {
|
||||
"tag" : "blue"
|
||||
}
|
||||
}
|
||||
}'
|
||||
--------------------------------------------------
|
||||
|
@ -71,9 +75,11 @@ And, we can delete the doc if the tags contain blue, or ignore (noop):
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
|
||||
"script" : "ctx._source.tags.contains(tag) ? ctx.op = \"delete\" : ctx.op = \"none\"",
|
||||
"params" : {
|
||||
"tag" : "blue"
|
||||
"script" : {
|
||||
"inline": "ctx._source.tags.contains(tag) ? ctx.op = \"delete\" : ctx.op = \"none\"",
|
||||
"params" : {
|
||||
"tag" : "blue"
|
||||
}
|
||||
}
|
||||
}'
|
||||
--------------------------------------------------
|
||||
|
@ -136,9 +142,11 @@ index the fresh doc:
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
|
||||
"script" : "ctx._source.counter += count",
|
||||
"params" : {
|
||||
"count" : 4
|
||||
"script" : {
|
||||
"inline": "ctx._source.counter += count",
|
||||
"params" : {
|
||||
"count" : 4
|
||||
}
|
||||
},
|
||||
"upsert" : {
|
||||
"counter" : 1
|
||||
|
@ -153,13 +161,15 @@ new `scripted_upsert` parameter with the value `true`.
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/sessions/session/dh3sgudg8gsrgl/_update' -d '{
|
||||
"script_id" : "my_web_session_summariser",
|
||||
"scripted_upsert":true,
|
||||
"params" : {
|
||||
"pageViewEvent" : {
|
||||
"url":"foo.com/bar",
|
||||
"response":404,
|
||||
"time":"2014-01-01 12:32"
|
||||
"script" : {
|
||||
"id": "my_web_session_summariser",
|
||||
"params" : {
|
||||
"pageViewEvent" : {
|
||||
"url":"foo.com/bar",
|
||||
"response":404,
|
||||
"time":"2014-01-01 12:32"
|
||||
}
|
||||
}
|
||||
},
|
||||
"upsert" : {
|
||||
|
|
|
@ -566,7 +566,7 @@ Which means that we just successfully bulk indexed 1000 documents into the bank
|
|||
|
||||
=== The Search API
|
||||
|
||||
Now let's start with some simple searches. There are two basic ways to run searches: one is by sending search parameters through the <<search-uri-request,REST request URI>> and the other by sending them through the<<search-request-body,[REST request body>>. The request body method allows you to be more expressive and also to define your searches in a more readable JSON format. We'll try one example of the request URI method but for the remainder of this tutorial, we will exclusively be using the request body method.
|
||||
Now let's start with some simple searches. There are two basic ways to run searches: one is by sending search parameters through the <<search-uri-request,REST request URI>> and the other by sending them through the <<search-request-body,REST request body>>. The request body method allows you to be more expressive and also to define your searches in a more readable JSON format. We'll try one example of the request URI method but for the remainder of this tutorial, we will exclusively be using the request body method.
|
||||
|
||||
The REST API for search is accessible from the `_search` endpoint. This example returns all documents in the bank index:
|
||||
|
||||
|
|
|
@ -149,6 +149,7 @@ field data format.
|
|||
Computes and stores field data data-structures on disk at indexing time.
|
||||
|
||||
[float]
|
||||
[[global-ordinals]]
|
||||
==== Global ordinals
|
||||
|
||||
Global ordinals is a data-structure on top of field data, that maintains an
|
||||
|
@ -182,6 +183,7 @@ ordinals is a small because it is very efficiently compressed. Eager loading of
|
|||
can move the loading time from the first search request, to the refresh itself.
|
||||
|
||||
[float]
|
||||
[[fielddata-loading]]
|
||||
=== Fielddata loading
|
||||
|
||||
By default, field data is loaded lazily, ie. the first time that a query that
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
[[mapping-parent-field]]
|
||||
=== `_parent`
|
||||
|
||||
TIP: It is highly recommended to reindex all indices with the `_parent` field that were created before version 2.x.
|
||||
The reason for this is to gain from all the optimizations added with the 2.0 release.
|
||||
|
||||
The parent field mapping is defined on a child mapping, and points to
|
||||
the parent type this child relates to. For example, in case of a `blog`
|
||||
type and a `blog_tag` type child document, the mapping for `blog_tag`
|
||||
|
@ -20,8 +23,34 @@ should be:
|
|||
The mapping is automatically stored and indexed (meaning it can be
|
||||
searched on using the `_parent` field notation).
|
||||
|
||||
==== Field data loading
|
||||
==== Limitations
|
||||
|
||||
Contrary to other fields the fielddata loading is not `lazy`, but `eager`. The reason for this is that when this
|
||||
field has been enabled it is going to be used in parent/child queries, which heavily relies on field data to perform
|
||||
efficiently. This can already be observed during indexing after refresh either automatically or manually has been executed.
|
||||
The `_parent.type` setting can only point to a type that doesn't exist yet.
|
||||
This means that a type can't become a parent type after it has been created.
|
||||
|
||||
The `_parent.type` setting can't point to itself. This means self-referential
|
||||
parent/child isn't supported.
|
||||
|
||||
Parent/child queries (`has_child` & `has_parent`) can't be used in index aliases.
|
||||
|
||||
==== Global ordinals
|
||||
|
||||
Parent-child uses <<global-ordinals,global ordinals>> to speed up joins and global ordinals need to be rebuilt after any change to a shard.
|
||||
The more parent id values are stored in a shard, the longer it takes to rebuild global ordinals for the `_parent` field.
|
||||
|
||||
Global ordinals, by default, are built lazily: the first parent-child query or aggregation after a refresh will trigger building of global ordinals.
|
||||
This can introduce a significant latency spike for your users. You can use <<fielddata-loading,eager_global_ordinals>> to shift the cost of building global ordinals
|
||||
from query time to refresh time, by mapping the `_parent` field as follows:
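A minimal sketch of such a mapping (`my_child_type` and `my_parent_type` are placeholder names; the option name follows the <<fielddata-loading,fielddata loading>> settings):

[source,js]
--------------------------------------------------
{
    "my_child_type" : {
        "_parent" : {
            "type" : "my_parent_type", <1>
            "fielddata" : {
                "loading" : "eager_global_ordinals"
            }
        }
    }
}
--------------------------------------------------
<1> Placeholder type names; replace them with your own parent and child types.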
|
||||
|
||||
==== Memory usage
|
||||
|
||||
The only on heap memory used by parent/child is the global ordinals for the `_parent` field.
|
||||
|
||||
How much memory is used for the global ordinals for the `_parent` field in the fielddata cache
|
||||
can be checked via the <<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
|
||||
APIs, e.g.:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XGET "http://localhost:9200/_stats/fielddata?pretty&human&fielddata_fields=_parent"
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -10,11 +10,13 @@ field. Example:
|
|||
{
|
||||
"example" : {
|
||||
"transform" : {
|
||||
"script" : "if (ctx._source['title']?.startsWith('t')) ctx._source['suggest'] = ctx._source['content']",
|
||||
"params" : {
|
||||
"variable" : "not used but an example anyway"
|
||||
},
|
||||
"lang": "groovy"
|
||||
"script" : {
|
||||
"inline": "if (ctx._source['title']?.startsWith('t')) ctx._source['suggest'] = ctx._source['content']",
|
||||
"params" : {
|
||||
"variable" : "not used but an example anyway"
|
||||
},
|
||||
"lang": "groovy"
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"title": { "type": "string" },
|
||||
|
|
|
@ -626,3 +626,19 @@ anymore, it will only highlight fields that were queried.
|
|||
The `match` query with type set to `match_phrase_prefix` is not supported by the
|
||||
postings highlighter. No highlighted snippets will be returned.
|
||||
|
||||
[float]
|
||||
=== Parent/child
|
||||
|
||||
Parent/child has been rewritten completely to reduce memory usage and to execute
|
||||
`has_child` and `has_parent` queries faster and more efficiently. The `_parent` field
|
||||
uses doc values by default. The refactored and improved implementation is only active
|
||||
for indices created on or after version 2.0.
|
||||
|
||||
In order to benefit from all performance and memory improvements, we recommend reindexing all
|
||||
indices that have the `_parent` field and were created before the cluster was upgraded to 2.0.
|
||||
|
||||
The following breaks in backwards compatibility have been made on indices with the `_parent` field
|
||||
created on clusters running version 2.0 or later:
|
||||
* The `type` option on the `_parent` field can only point to a parent type that doesn't exist yet,
|
||||
so this means that an existing type/mapping can no longer become a parent type.
|
||||
* The `has_child` and `has_parent` queries can no longer be used in alias filters.
|
|
@ -29,7 +29,7 @@ GET /_search
|
|||
{
|
||||
"script_fields": {
|
||||
"my_field": {
|
||||
"script": "1 + my_var",
|
||||
"inline": "1 + my_var",
|
||||
"params": {
|
||||
"my_var": 2
|
||||
}
|
||||
|
@ -38,7 +38,7 @@ GET /_search
|
|||
}
|
||||
-----------------------------------
|
||||
|
||||
Save the contents of the script as a file called `config/scripts/my_script.groovy`
|
||||
Save the contents of the `inline` field as a file called `config/scripts/my_script.groovy`
|
||||
on every data node in the cluster:
|
||||
|
||||
[source,js]
|
||||
|
@ -54,7 +54,7 @@ GET /_search
|
|||
{
|
||||
"script_fields": {
|
||||
"my_field": {
|
||||
"script_file": "my_script",
|
||||
"file": "my_script",
|
||||
"params": {
|
||||
"my_var": 2
|
||||
}
|
||||
|
@ -67,9 +67,9 @@ GET /_search
|
|||
|
||||
|
||||
Additional `lang` plugins are provided to allow to execute scripts in
|
||||
different languages. All places where a `script` parameter can be used, a `lang` parameter
|
||||
(on the same level) can be provided to define the language of the
|
||||
script. The following are the supported scripting languages:
|
||||
different languages. All places where a script can be used, a `lang` parameter
|
||||
can be provided to define the language of the script. The following are the
|
||||
supported scripting languages:
|
||||
|
||||
[cols="<,<,<",options="header",]
|
||||
|=======================================================================
|
||||
|
@ -120,7 +120,7 @@ curl -XPOST localhost:9200/_search -d '{
|
|||
{
|
||||
"script_score": {
|
||||
"lang": "groovy",
|
||||
"script_file": "calculate-score",
|
||||
"file": "calculate-score",
|
||||
"params": {
|
||||
"my_modifier": 8
|
||||
}
|
||||
|
@ -162,8 +162,8 @@ curl -XPOST localhost:9200/_scripts/groovy/indexedCalculateScore -d '{
|
|||
This will create a document with id: `indexedCalculateScore` and type: `groovy` in the
|
||||
`.scripts` index. The type of the document is the language used by the script.
|
||||
|
||||
This script can be accessed at query time by appending `_id` to
|
||||
the script parameter and passing the script id. So `script` becomes `script_id`.:
|
||||
This script can be accessed at query time by using the `id` script parameter and passing
|
||||
the script id:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -178,7 +178,7 @@ curl -XPOST localhost:9200/_search -d '{
|
|||
"functions": [
|
||||
{
|
||||
"script_score": {
|
||||
"script_id": "indexedCalculateScore",
|
||||
"id": "indexedCalculateScore",
|
||||
"lang" : "groovy",
|
||||
"params": {
|
||||
"my_modifier": 8
|
||||
|
|
|
@ -120,12 +120,14 @@ script, and provide parameters to it:
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
"script_score": {
|
||||
"lang": "lang",
|
||||
"params": {
|
||||
"param1": value1,
|
||||
"param2": value2
|
||||
},
|
||||
"script": "_score * doc['my_numeric_field'].value / pow(param1, param2)"
|
||||
"script": {
|
||||
"lang": "lang",
|
||||
"params": {
|
||||
"param1": value1,
|
||||
"param2": value2
|
||||
},
|
||||
"inline": "_score * doc['my_numeric_field'].value / pow(param1, param2)"
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -72,21 +72,3 @@ a match:
|
|||
|
||||
The `min_children` and `max_children` parameters can be combined with
|
||||
the `score_mode` parameter.
|
||||
|
||||
[float]
|
||||
=== Memory Considerations
|
||||
|
||||
In order to support parent-child joins, all of the (string) parent IDs
|
||||
must be resident in memory (in the <<index-modules-fielddata,field data cache>>.
|
||||
Additionally, every child document is mapped to its parent using a long
|
||||
value (approximately). It is advisable to keep the string parent ID short
|
||||
in order to reduce memory usage.
|
||||
|
||||
You can check how much memory is being used by the `_parent` field in the fielddata cache
|
||||
using the <<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
|
||||
APIS, eg:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XGET "http://localhost:9200/_stats/fielddata?pretty&human&fielddata_fields=_parent"
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -47,23 +47,3 @@ matching parent document. The score type can be specified with the
|
|||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
[float]
|
||||
=== Memory Considerations
|
||||
|
||||
In order to support parent-child joins, all of the (string) parent IDs
|
||||
must be resident in memory (in the <<index-modules-fielddata,field data cache>>.
|
||||
Additionally, every child document is mapped to its parent using a long
|
||||
value (approximately). It is advisable to keep the string parent ID short
|
||||
in order to reduce memory usage.
|
||||
|
||||
You can check how much memory is being used by the `_parent` field in the fielddata cache
|
||||
using the <<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
|
||||
APIS, eg:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XGET "http://localhost:9200/_stats/fielddata?pretty&human&fielddata_fields=_parent"
|
||||
--------------------------------------------------
|
||||
|
||||
|
||||
|
|
|
@ -34,9 +34,11 @@ to use the ability to pass parameters to the script itself, for example:
|
|||
},
|
||||
"filter" : {
|
||||
"script" : {
|
||||
"script" : "doc['num1'].value > param1"
|
||||
"params" : {
|
||||
"param1" : 5
|
||||
"script" : {
|
||||
"inline" : "doc['num1'].value > param1"
|
||||
"params" : {
|
||||
"param1" : 5
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ GET /_search
|
|||
{
|
||||
"query": {
|
||||
"template": {
|
||||
"query": { "match": { "text": "{{query_string}}" }},
|
||||
"inline": { "match": { "text": "{{query_string}}" }},
|
||||
"params" : {
|
||||
"query_string" : "all about search"
|
||||
}
|
||||
|
@ -45,7 +45,7 @@ GET /_search
|
|||
{
|
||||
"query": {
|
||||
"template": {
|
||||
"query": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1>
|
||||
"inline": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1>
|
||||
"params" : {
|
||||
"query_string" : "all about search"
|
||||
}
|
||||
|
|
|
@ -15,9 +15,11 @@ evaluation>> (based on different fields) for each hit, for example:
|
|||
"script" : "doc['my_field_name'].value * 2"
|
||||
},
|
||||
"test2" : {
|
||||
"script" : "doc['my_field_name'].value * factor",
|
||||
"params" : {
|
||||
"factor" : 2.0
|
||||
"script" : {
|
||||
"inline": "doc['my_field_name'].value * factor",
|
||||
"params" : {
|
||||
"factor" : 2.0
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -318,10 +318,12 @@ Allow to sort based on custom scripts, here is an example:
|
|||
},
|
||||
"sort" : {
|
||||
"_script" : {
|
||||
"script" : "doc['field_name'].value * factor",
|
||||
"type" : "number",
|
||||
"params" : {
|
||||
"factor" : 1.1
|
||||
"script" : {
|
||||
"inline": "doc['field_name'].value * factor",
|
||||
"params" : {
|
||||
"factor" : 1.1
|
||||
}
|
||||
},
|
||||
"order" : "asc"
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ before they are executed and fill existing templates with template parameters.
|
|||
------------------------------------------
|
||||
GET /_search/template
|
||||
{
|
||||
"template" : {
|
||||
"inline" : {
|
||||
"query": { "match" : { "{{my_field}}" : "{{my_value}}" } },
|
||||
"size" : "{{my_size}}"
|
||||
},
|
||||
|
@ -40,7 +40,7 @@ disable scripts per language, source and operation as described in
|
|||
------------------------------------------
|
||||
GET /_search/template
|
||||
{
|
||||
"template": {
|
||||
"inline": {
|
||||
"query": {
|
||||
"match": {
|
||||
"title": "{{query_string}}"
|
||||
|
@ -60,7 +60,7 @@ GET /_search/template
|
|||
------------------------------------------
|
||||
GET /_search/template
|
||||
{
|
||||
"template": {
|
||||
"inline": {
|
||||
"query": {
|
||||
"terms": {
|
||||
"status": [
|
||||
|
@ -97,7 +97,7 @@ A default value is written as `{{var}}{{^var}}default{{/var}}` for instance:
|
|||
[source,js]
|
||||
------------------------------------------
|
||||
{
|
||||
"template": {
|
||||
"inline": {
|
||||
"query": {
|
||||
"range": {
|
||||
"line_no": {
|
||||
|
@ -212,7 +212,7 @@ via the REST API, should be written as a string:
|
|||
|
||||
[source,json]
|
||||
--------------------
|
||||
"template": "{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}"
|
||||
"inline": "{\"query\":{\"filtered\":{\"query\":{\"match\":{\"line\":\"{{text}}\"}},\"filter\":{{{#line_no}}\"range\":{\"line_no\":{{{#start}}\"gte\":\"{{start}}\"{{#end}},{{/end}}{{/start}}{{#end}}\"lte\":\"{{end}}\"{{/end}}}}{{/line_no}}}}}}"
|
||||
--------------------
|
||||
|
||||
==================================
|
||||
|
@ -229,9 +229,7 @@ In order to execute the stored template, reference it by it's name under the `te
|
|||
------------------------------------------
|
||||
GET /_search/template
|
||||
{
|
||||
"template": {
|
||||
"file": "storedTemplate" <1>
|
||||
},
|
||||
"file": "storedTemplate", <1>
|
||||
"params": {
|
||||
"query_string": "search for these words"
|
||||
}
|
||||
|
@ -293,9 +291,7 @@ To use an indexed template at search time use:
|
|||
------------------------------------------
|
||||
GET /_search/template
|
||||
{
|
||||
"template": {
|
||||
"id": "templateName" <1>
|
||||
},
|
||||
"id": "templateName", <1>
|
||||
"params": {
|
||||
"query_string": "search for these words"
|
||||
}
|
||||
|
|
|
@ -162,13 +162,13 @@ can contain misspellings (See parameter descriptions below).
|
|||
is wrapped rather than each token.
|
||||
|
||||
`collate`::
|
||||
Checks each suggestion against the specified `query` or `filter` to
|
||||
prune suggestions for which no matching docs exist in the index.
|
||||
The collate query for a suggestion is run only on the local shard from which
|
||||
the suggestion has been generated from. Either a `query` or a `filter` must
|
||||
be specified, and it is run as a <<query-dsl-template-query,`template` query>>.
|
||||
Checks each suggestion against the specified `query` to prune suggestions
|
||||
for which no matching docs exist in the index. The collate query for a
|
||||
suggestion is run only on the local shard from which the suggestion has
|
||||
been generated. The `query` must be specified, and it is run as
|
||||
a <<query-dsl-template-query,`template` query>>.
|
||||
The current suggestion is automatically made available as the `{{suggestion}}`
|
||||
variable, which should be used in your query/filter. You can still specify
|
||||
variable, which should be used in your query. You can still specify
|
||||
your own template `params` -- the `suggestion` value will be added to the
|
||||
variables you specify. Additionally, you can specify a `prune` to control
|
||||
if all phrase suggestions will be returned, when set to `true` the suggestions
|
||||
|
|
|
@ -264,6 +264,29 @@ file which will resolve to an environment setting, for example:
|
|||
}
|
||||
--------------------------------------------------
|
||||
|
||||
Additionally, for settings that you do not wish to store in the configuration
|
||||
file, you can use the value `${prompt::text}` or `${prompt::secret}` and start
|
||||
Elasticsearch in the foreground. `${prompt::secret}` has echoing disabled so
|
||||
that the value entered will not be shown in your terminal; `${prompt::text}`
|
||||
will allow you to see the value as you type it in. For example:
|
||||
|
||||
[source,yaml]
|
||||
--------------------------------------------------
|
||||
node:
|
||||
name: ${prompt::text}
|
||||
--------------------------------------------------
|
||||
|
||||
On execution of the `elasticsearch` command, you will be prompted to enter
|
||||
the actual value like so:
|
||||
|
||||
[source,sh]
|
||||
--------------------------------------------------
|
||||
Enter value for [node.name]:
|
||||
--------------------------------------------------
|
||||
|
||||
NOTE: Elasticsearch will not start if `${prompt::text}` or `${prompt::secret}`
|
||||
is used in the settings and the process is run as a service or in the background.
|
||||
|
||||
The location of the configuration file can be set externally using a
|
||||
system property:
|
||||
|
||||
|
|
|
@ -20,7 +20,12 @@
|
|||
package org.elasticsearch.action.bulk;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.elasticsearch.action.*;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.CompositeIndicesRequest;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.WriteConsistencyLevel;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
|
@ -140,7 +145,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
|
|||
sizeInBytes += request.upsertRequest().source().length();
|
||||
}
|
||||
if (request.script() != null) {
|
||||
sizeInBytes += request.script().length() * 2;
|
||||
sizeInBytes += request.script().getScript().length() * 2;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
|
|
@ -35,11 +35,13 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.script.Template;
|
||||
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
|
||||
import org.elasticsearch.search.Scroll;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.search.Scroll.readScroll;
|
||||
|
@ -69,9 +71,7 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
|||
private String preference;
|
||||
|
||||
private BytesReference templateSource;
|
||||
private String templateName;
|
||||
private ScriptService.ScriptType templateType;
|
||||
private Map<String, Object> templateParams = Collections.emptyMap();
|
||||
private Template template;
|
||||
|
||||
private BytesReference source;
|
||||
|
||||
|
@ -100,9 +100,7 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
|||
this.routing = searchRequest.routing;
|
||||
this.preference = searchRequest.preference;
|
||||
this.templateSource = searchRequest.templateSource;
|
||||
this.templateName = searchRequest.templateName;
|
||||
this.templateType = searchRequest.templateType;
|
||||
this.templateParams = searchRequest.templateParams;
|
||||
this.template = searchRequest.template;
|
||||
this.source = searchRequest.source;
|
||||
this.extraSource = searchRequest.extraSource;
|
||||
this.queryCache = searchRequest.queryCache;
|
||||
|
@ -390,42 +388,92 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
|||
}
|
||||
|
||||
/**
|
||||
* The name of the stored template
|
||||
* The stored template
|
||||
*/
|
||||
public void template(Template template) {
|
||||
this.template = template;
|
||||
}
|
||||
|
||||
/**
|
||||
* The stored template
|
||||
*/
|
||||
public Template template() {
|
||||
return template;
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the stored template
|
||||
*
|
||||
* @deprecated use {@link #template(Template)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public void templateName(String templateName) {
|
||||
this.templateName = templateName;
|
||||
updateOrCreateScript(templateName, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* The type of the stored template
|
||||
*
|
||||
* @deprecated use {@link #template(Template)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public void templateType(ScriptService.ScriptType templateType) {
|
||||
this.templateType = templateType;
|
||||
updateOrCreateScript(null, templateType, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Template parameters used for rendering
|
||||
*
|
||||
* @deprecated use {@link #template(Template)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public void templateParams(Map<String, Object> params) {
|
||||
this.templateParams = params;
|
||||
updateOrCreateScript(null, null, null, params);
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the stored template
|
||||
*
|
||||
* @deprecated use {@link #template()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public String templateName() {
|
||||
return templateName;
|
||||
return template == null ? null : template.getScript();
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the stored template
|
||||
*
|
||||
* @deprecated use {@link #template()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public ScriptService.ScriptType templateType() {
|
||||
return templateType;
|
||||
return template == null ? null : template.getType();
|
||||
}
|
||||
|
||||
/**
|
||||
* Template parameters used for rendering
|
||||
*
|
||||
* @deprecated use {@link #template()} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public Map<String, Object> templateParams() {
|
||||
return templateParams;
|
||||
return template == null ? null : template.getParams();
|
||||
}
|
||||
|
||||
private void updateOrCreateScript(String templateContent, ScriptType type, String lang, Map<String, Object> params) {
|
||||
Template template = template();
|
||||
if (template == null) {
|
||||
template = new Template(templateContent == null ? "" : templateContent, type == null ? ScriptType.INLINE : type, lang, null,
|
||||
params);
|
||||
} else {
|
||||
String newTemplateContent = templateContent == null ? template.getScript() : templateContent;
|
||||
ScriptType newTemplateType = type == null ? template.getType() : type;
|
||||
String newTemplateLang = lang == null ? template.getLang() : lang;
|
||||
Map<String, Object> newTemplateParams = params == null ? template.getParams() : params;
|
||||
template = new Template(newTemplateContent, newTemplateType, MustacheScriptEngineService.NAME, null, newTemplateParams);
|
||||
}
|
||||
template(template);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -517,10 +565,8 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
|||
indicesOptions = IndicesOptions.readIndicesOptions(in);
|
||||
|
||||
templateSource = in.readBytesReference();
|
||||
templateName = in.readOptionalString();
|
||||
templateType = ScriptService.ScriptType.readFrom(in);
|
||||
if (in.readBoolean()) {
|
||||
templateParams = (Map<String, Object>) in.readGenericValue();
|
||||
template = Template.readTemplate(in);
|
||||
}
|
||||
queryCache = in.readOptionalBoolean();
|
||||
}
|
||||
|
@ -550,12 +596,10 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
|||
indicesOptions.writeIndicesOptions(out);
|
||||
|
||||
out.writeBytesReference(templateSource);
|
||||
out.writeOptionalString(templateName);
|
||||
ScriptService.ScriptType.writeTo(templateType, out);
|
||||
boolean existTemplateParams = templateParams != null;
|
||||
out.writeBoolean(existTemplateParams);
|
||||
if (existTemplateParams) {
|
||||
out.writeGenericValue(templateParams);
|
||||
boolean hasTemplate = template != null;
|
||||
out.writeBoolean(hasTemplate);
|
||||
if (hasTemplate) {
|
||||
template.writeTo(out);
|
||||
}
|
||||
|
||||
out.writeOptionalBoolean(queryCache);
|
||||
|
|
|
@ -29,7 +29,9 @@ import org.elasticsearch.common.unit.TimeValue;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.Template;
|
||||
import org.elasticsearch.search.Scroll;
|
||||
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
|
@ -423,33 +425,60 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
|
|||
* @param name The name that will represent this value in the return hit
|
||||
* @param script The script to use
|
||||
*/
|
||||
public SearchRequestBuilder addScriptField(String name, Script script) {
|
||||
sourceBuilder().scriptField(name, script);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a script based field to load and return. The field does not have to
|
||||
* be stored, but its recommended to use non analyzed or numeric fields.
|
||||
*
|
||||
* @param name
|
||||
* The name that will represent this value in the return hit
|
||||
* @param script
|
||||
* The script to use
|
||||
* @deprecated Use {@link #addScriptField(String, Script)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public SearchRequestBuilder addScriptField(String name, String script) {
|
||||
sourceBuilder().scriptField(name, script);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a script based field to load and return. The field does not have to be stored,
|
||||
* but its recommended to use non analyzed or numeric fields.
|
||||
* Adds a script based field to load and return. The field does not have to
|
||||
* be stored, but its recommended to use non analyzed or numeric fields.
|
||||
*
|
||||
* @param name The name that will represent this value in the return hit
|
||||
* @param script The script to use
|
||||
* @param params Parameters that the script can use.
|
||||
* @param name
|
||||
* The name that will represent this value in the return hit
|
||||
* @param script
|
||||
* The script to use
|
||||
* @param params
|
||||
* Parameters that the script can use.
|
||||
* @deprecated Use {@link #addScriptField(String, Script)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public SearchRequestBuilder addScriptField(String name, String script, Map<String, Object> params) {
|
||||
sourceBuilder().scriptField(name, script, params);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a script based field to load and return. The field does not have to be stored,
|
||||
* but its recommended to use non analyzed or numeric fields.
|
||||
* Adds a script based field to load and return. The field does not have to
|
||||
* be stored, but its recommended to use non analyzed or numeric fields.
|
||||
*
|
||||
* @param name The name that will represent this value in the return hit
|
||||
* @param lang The language of the script
|
||||
* @param script The script to use
|
||||
* @param params Parameters that the script can use (can be <tt>null</tt>).
|
||||
* @param name
|
||||
* The name that will represent this value in the return hit
|
||||
* @param lang
|
||||
* The language of the script
|
||||
* @param script
|
||||
* The script to use
|
||||
* @param params
|
||||
* Parameters that the script can use (can be <tt>null</tt>).
|
||||
* @deprecated Use {@link #addScriptField(String, Script)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public SearchRequestBuilder addScriptField(String name, String lang, String script, Map<String, Object> params) {
|
||||
sourceBuilder().scriptField(name, lang, script, params);
|
||||
return this;
|
||||
|
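For callers, the deprecation above means wrapping the script source, language and parameters into one Script object. A hedged migration sketch: the index name, field name and the groovy expression are made up for illustration, while the builder methods and the four-argument Script constructor are the ones that appear elsewhere in this commit.

[source,java]
--------------------------------------------------
import java.util.Collections;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;

public class ScriptFieldMigration {

    // deprecated form: lang, source and params passed as separate arguments
    public static SearchResponse oldStyle(Client client) {
        return client.prepareSearch("people")
                .addScriptField("height_cm", "groovy", "doc['height'].value * factor",
                        Collections.<String, Object>singletonMap("factor", 2.54))
                .get();
    }

    // preferred form: everything carried by a single Script object
    public static SearchResponse newStyle(Client client) {
        return client.prepareSearch("people")
                .addScriptField("height_cm", new Script("doc['height'].value * factor",
                        ScriptService.ScriptType.INLINE, "groovy",
                        Collections.<String, Object>singletonMap("factor", 2.54)))
                .get();
    }
}
--------------------------------------------------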
@@ -939,16 +968,33 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
     * template stuff
     */

    public SearchRequestBuilder setTemplate(Template template) {
        request.template(template);
        return this;
    }

    /**
     * @deprecated Use {@link #setTemplate(Template)} instead.
     */
    @Deprecated
    public SearchRequestBuilder setTemplateName(String templateName) {
        request.templateName(templateName);
        return this;
    }

    /**
     * @deprecated Use {@link #setTemplate(Template)} instead.
     */
    @Deprecated
    public SearchRequestBuilder setTemplateType(ScriptService.ScriptType templateType) {
        request.templateType(templateType);
        return this;
    }

    /**
     * @deprecated Use {@link #setTemplate(Template)} instead.
     */
    @Deprecated
    public SearchRequestBuilder setTemplateParams(Map<String, Object> templateParams) {
        request.templateParams(templateParams);
        return this;
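The three deprecated setters above still work but are now funnelled into a single Template internally; new code should pass one Template object. A hedged sketch: the index, template id and parameter are illustrative, it assumes a template registered as "my_template", and the five-argument Template constructor mirrors the one used in the SearchRequest hunk earlier in this diff (where the fourth argument is likewise passed as null).

[source,java]
--------------------------------------------------
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;

public class TemplateSearchMigration {

    public static SearchResponse search(Client client) {
        Map<String, Object> params = new HashMap<>();
        params.put("query_string", "quick brown fox");

        // deprecated trio, merged into one Template behind the scenes
        client.prepareSearch("articles")
                .setTemplateName("my_template")
                .setTemplateType(ScriptService.ScriptType.INDEXED)
                .setTemplateParams(params)
                .get();

        // preferred single-object form
        return client.prepareSearch("articles")
                .setTemplate(new Template("my_template", ScriptService.ScriptType.INDEXED, null, null, params))
                .get();
    }
}
--------------------------------------------------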
@@ -91,7 +91,7 @@ public class TransportSearchDfsQueryAndFetchAction extends TransportSearchTypeAc
            }
        }

    void executeSecondPhase(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter, DiscoveryNode node, final QuerySearchRequest querySearchRequest) {
    void executeSecondPhase(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter, final DiscoveryNode node, final QuerySearchRequest querySearchRequest) {
        searchService.sendExecuteFetch(node, querySearchRequest, new ActionListener<QueryFetchSearchResult>() {
            @Override
            public void onResponse(QueryFetchSearchResult result) {

@@ -104,7 +104,14 @@ public class TransportSearchDfsQueryAndFetchAction extends TransportSearchTypeAc

            @Override
            public void onFailure(Throwable t) {
                onSecondPhaseFailure(t, querySearchRequest, shardIndex, dfsResult, counter);
                try {
                    onSecondPhaseFailure(t, querySearchRequest, shardIndex, dfsResult, counter);
                } finally {
                    // the query might not have been executed at all (for example because thread pool rejected execution)
                    // and the search context that was created in dfs phase might not be released.
                    // release it again to be on the safe side
                    sendReleaseSearchContext(querySearchRequest.id(), node);
                }
            }
        });
    }
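The change above wraps the failure handler in try/finally so the per-shard search context is released even if the handler itself throws, or if the query never ran because the thread pool rejected it. The same shape, reduced to a self-contained plain-Java sketch; the listener interface and release method here are illustrative stand-ins, not Elasticsearch types.

[source,java]
--------------------------------------------------
// Generic shape of the fix: whatever the failure handler does, the cleanup still runs.
public class ReleaseOnFailureSketch {

    interface Listener<T> {
        void onResponse(T result);
        void onFailure(Throwable t);
    }

    // stand-in for sendReleaseSearchContext(contextId, node)
    static void releaseContext(long contextId) {
        System.out.println("released context " + contextId);
    }

    static Listener<String> listenerFor(final long contextId) {
        return new Listener<String>() {
            @Override
            public void onResponse(String result) {
                System.out.println("got " + result);
            }

            @Override
            public void onFailure(Throwable t) {
                try {
                    System.err.println("phase failed: " + t.getMessage()); // record the failure first
                } finally {
                    releaseContext(contextId); // runs even if the handler above throws
                }
            }
        };
    }

    public static void main(String[] args) {
        listenerFor(42L).onFailure(new RuntimeException("thread pool rejected execution"));
    }
}
--------------------------------------------------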
@ -100,7 +100,7 @@ public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeA
|
|||
}
|
||||
}
|
||||
|
||||
void executeQuery(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter, final QuerySearchRequest querySearchRequest, DiscoveryNode node) {
|
||||
void executeQuery(final int shardIndex, final DfsSearchResult dfsResult, final AtomicInteger counter, final QuerySearchRequest querySearchRequest, final DiscoveryNode node) {
|
||||
searchService.sendExecuteQuery(node, querySearchRequest, new ActionListener<QuerySearchResult>() {
|
||||
@Override
|
||||
public void onResponse(QuerySearchResult result) {
|
||||
|
@ -113,7 +113,14 @@ public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeA
|
|||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
onQueryFailure(t, querySearchRequest, shardIndex, dfsResult, counter);
|
||||
try {
|
||||
onQueryFailure(t, querySearchRequest, shardIndex, dfsResult, counter);
|
||||
} finally {
|
||||
// the query might not have been executed at all (for example because thread pool rejected execution)
|
||||
// and the search context that was created in dfs phase might not be released.
|
||||
// release it again to be on the safe side
|
||||
sendReleaseSearchContext(querySearchRequest.id(), node);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -176,6 +183,11 @@ public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeA
|
|||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
// the search context might not be cleared on the node where the fetch was executed for example
|
||||
// because the action was rejected by the thread pool. in this case we need to send a dedicated
|
||||
// request to clear the search context. by setting docIdsToLoad to null, the context will be cleared
|
||||
// in TransportSearchTypeAction.releaseIrrelevantSearchContexts() after the search request is done.
|
||||
docIdsToLoad.set(shardIndex, null);
|
||||
onFetchFailure(t, fetchSearchRequest, shardIndex, shardTarget, counter);
|
||||
}
|
||||
});
|
||||
|
|
|
@ -35,8 +35,8 @@ import org.elasticsearch.common.util.concurrent.AtomicArray;
|
|||
import org.elasticsearch.search.SearchShardTarget;
|
||||
import org.elasticsearch.search.action.SearchServiceTransportAction;
|
||||
import org.elasticsearch.search.controller.SearchPhaseController;
|
||||
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
|
||||
import org.elasticsearch.search.fetch.FetchSearchResult;
|
||||
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
|
||||
import org.elasticsearch.search.internal.InternalSearchResponse;
|
||||
import org.elasticsearch.search.internal.ShardSearchTransportRequest;
|
||||
import org.elasticsearch.search.query.QuerySearchResultProvider;
|
||||
|
@ -118,7 +118,10 @@ public class TransportSearchQueryThenFetchAction extends TransportSearchTypeActi
|
|||
|
||||
@Override
|
||||
public void onFailure(Throwable t) {
|
||||
// the failure might happen without managing to clear the search context..., potentially need to clear its context (for example)
|
||||
// the search context might not be cleared on the node where the fetch was executed for example
|
||||
// because the action was rejected by the thread pool. in this case we need to send a dedicated
|
||||
// request to clear the search context. by setting docIdsToLoad to null, the context will be cleared
|
||||
// in TransportSearchTypeAction.releaseIrrelevantSearchContexts() after the search request is done.
|
||||
docIdsToLoad.set(shardIndex, null);
|
||||
onFetchFailure(t, fetchSearchRequest, shardIndex, shardTarget, counter);
|
||||
}
|
||||
|
|
|
@ -303,9 +303,7 @@ public abstract class TransportSearchTypeAction extends TransportAction<SearchRe
|
|||
for (AtomicArray.Entry<FirstResult> entry : firstResults.asList()) {
|
||||
try {
|
||||
DiscoveryNode node = nodes.get(entry.value.shardTarget().nodeId());
|
||||
if (node != null) { // should not happen (==null) but safeguard anyhow
|
||||
searchService.sendFreeContext(node, entry.value.id(), request);
|
||||
}
|
||||
sendReleaseSearchContext(entry.value.id(), node);
|
||||
} catch (Throwable t1) {
|
||||
logger.trace("failed to release context", t1);
|
||||
}
|
||||
|
@ -329,9 +327,7 @@ public abstract class TransportSearchTypeAction extends TransportAction<SearchRe
|
|||
&& docIdsToLoad.get(entry.index) == null) { // but none of them made it to the global top docs
|
||||
try {
|
||||
DiscoveryNode node = nodes.get(entry.value.queryResult().shardTarget().nodeId());
|
||||
if (node != null) { // should not happen (==null) but safeguard anyhow
|
||||
searchService.sendFreeContext(node, entry.value.queryResult().id(), request);
|
||||
}
|
||||
sendReleaseSearchContext(entry.value.queryResult().id(), node);
|
||||
} catch (Throwable t1) {
|
||||
logger.trace("failed to release context", t1);
|
||||
}
|
||||
|
@@ -340,6 +336,12 @@ public abstract class TransportSearchTypeAction extends TransportAction<SearchRe
            }
        }

        protected void sendReleaseSearchContext(long contextId, DiscoveryNode node) {
            if (node != null) {
                searchService.sendFreeContext(node, contextId, request);
            }
        }

        protected ShardFetchSearchRequest createFetchRequest(QuerySearchResult queryResult, AtomicArray.Entry<IntArrayList> entry, ScoreDoc[] lastEmittedDocPerShard) {
            if (lastEmittedDocPerShard != null) {
                ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[entry.index];
@ -43,7 +43,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
|
|||
import org.elasticsearch.cluster.routing.*;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.lease.Releasable;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -1080,7 +1080,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
|
|||
// to find a _meta document
|
||||
// So we have no choice but to index first and send mappings afterwards
|
||||
MapperService mapperService = indexShard.indexService().mapperService();
|
||||
mapperService.merge(request.type(), new CompressedString(update.toBytes()), true);
|
||||
mapperService.merge(request.type(), new CompressedXContent(update.toBytes()), true);
|
||||
created = operation.execute(indexShard);
|
||||
mappingUpdatedAction.updateMappingOnMasterAsynchronously(indexName, request.type(), update);
|
||||
} else {
|
||||
|
|
|
@ -45,7 +45,6 @@ import org.elasticsearch.index.shard.IndexShard;
|
|||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.lookup.SourceLookup;
|
||||
|
@ -94,7 +93,7 @@ public class UpdateHelper extends AbstractComponent {
|
|||
ctx.put("op", "create");
|
||||
ctx.put("_source", upsertDoc);
|
||||
try {
|
||||
ExecutableScript script = scriptService.executable(new Script(request.scriptLang, request.script, request.scriptType, request.scriptParams), ScriptContext.Standard.UPDATE);
|
||||
ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE);
|
||||
script.setNextVar("ctx", ctx);
|
||||
script.run();
|
||||
// we need to unwrap the ctx...
|
||||
|
@ -111,7 +110,8 @@ public class UpdateHelper extends AbstractComponent {
|
|||
// (the default) or "none", meaning abort upsert
|
||||
if (!"create".equals(scriptOpChoice)) {
|
||||
if (!"none".equals(scriptOpChoice)) {
|
||||
logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice, request.script);
|
||||
logger.warn("Used upsert operation [{}] for script [{}], doing nothing...", scriptOpChoice,
|
||||
request.script.getScript());
|
||||
}
|
||||
UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(),
|
||||
getResult.getVersion(), false);
|
||||
|
@ -193,7 +193,7 @@ public class UpdateHelper extends AbstractComponent {
|
|||
ctx.put("_source", sourceAndContent.v2());
|
||||
|
||||
try {
|
||||
ExecutableScript script = scriptService.executable(new Script(request.scriptLang, request.script, request.scriptType, request.scriptParams), ScriptContext.Standard.UPDATE);
|
||||
ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE);
|
||||
script.setNextVar("ctx", ctx);
|
||||
script.run();
|
||||
// we need to unwrap the ctx...
|
||||
|
@ -246,7 +246,7 @@ public class UpdateHelper extends AbstractComponent {
|
|||
update.setGetResult(extractGetResult(request, indexShard.indexService().index().name(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
|
||||
return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType);
|
||||
} else {
|
||||
logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script);
|
||||
logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript());
|
||||
UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false);
|
||||
return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType);
|
||||
}
|
||||
|
|
|
@ -19,14 +19,12 @@
|
|||
|
||||
package org.elasticsearch.action.update;
|
||||
|
||||
import com.google.common.collect.Maps;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.DocumentRequest;
|
||||
import org.elasticsearch.action.WriteConsistencyLevel;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
|
@ -37,11 +35,14 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptParameterParser;
|
||||
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.action.ValidateActions.addValidationError;
|
||||
|
@ -59,13 +60,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
private String parent;
|
||||
|
||||
@Nullable
|
||||
String script;
|
||||
@Nullable
|
||||
ScriptService.ScriptType scriptType;
|
||||
@Nullable
|
||||
String scriptLang;
|
||||
@Nullable
|
||||
Map<String, Object> scriptParams;
|
||||
Script script;
|
||||
|
||||
private String[] fields;
|
||||
|
||||
|
@@ -205,105 +200,171 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
        return this.shardId;
    }

    public String script() {
    public Script script() {
        return this.script;
    }

    public ScriptService.ScriptType scriptType() { return this.scriptType; }
    /**
     * The script to execute. Note, make sure not to send a different script each time and instead
     * use script params if possible with the same (automatically compiled) script.
     */
    public UpdateRequest script(Script script) {
        this.script = script;
        return this;
    }

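With the field collapsed into a single Script, an update is described by one object carrying source, type, language and params. A hedged sketch of the migration: the index, type, id and the groovy snippet are illustrative; the constructors, the deprecated setters and the new script(Script) setter are those shown in this hunk.

[source,java]
--------------------------------------------------
import java.util.Collections;
import java.util.Map;

import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;

public class UpdateScriptMigration {

    public static UpdateRequest buildRequest() {
        Map<String, Object> params = Collections.<String, Object>singletonMap("delta", 4);

        // deprecated style: source, type, lang and params set through separate calls
        UpdateRequest old = new UpdateRequest("orders", "order", "1")
                .script("ctx._source.counter += delta", ScriptType.INLINE)
                .scriptLang("groovy")
                .scriptParams(params);

        // preferred style: one Script object, as accepted by the new script(Script) setter
        return new UpdateRequest("orders", "order", "1")
                .script(new Script("ctx._source.counter += delta", ScriptType.INLINE, "groovy", params));
    }
}
--------------------------------------------------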
/**
|
||||
* @deprecated Use {@link #script()} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public String scriptString() {
|
||||
return this.script == null ? null : this.script.getScript();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #script()} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public ScriptService.ScriptType scriptType() {
|
||||
return this.script == null ? null : this.script.getType();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #script()} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public Map<String, Object> scriptParams() {
|
||||
return this.scriptParams;
|
||||
return this.script == null ? null : this.script.getParams();
|
||||
}
|
||||
|
||||
/**
|
||||
* The script to execute. Note, make sure not to send different script each times and instead
|
||||
* use script params if possible with the same (automatically compiled) script.
|
||||
* The script to execute. Note, make sure not to send different script each
|
||||
* times and instead use script params if possible with the same
|
||||
* (automatically compiled) script.
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequest script(String script, ScriptService.ScriptType scriptType) {
|
||||
this.script = script;
|
||||
this.scriptType = scriptType;
|
||||
updateOrCreateScript(script, scriptType, null, null);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The script to execute. Note, make sure not to send different script each times and instead
|
||||
* use script params if possible with the same (automatically compiled) script.
|
||||
* The script to execute. Note, make sure not to send different script each
|
||||
* times and instead use script params if possible with the same
|
||||
* (automatically compiled) script.
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequest script(String script) {
|
||||
this.script = script;
|
||||
this.scriptType = ScriptService.ScriptType.INLINE;
|
||||
updateOrCreateScript(script, ScriptType.INLINE, null, null);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The language of the script to execute.
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequest scriptLang(String scriptLang) {
|
||||
this.scriptLang = scriptLang;
|
||||
updateOrCreateScript(null, null, scriptLang, null);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #script()} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public String scriptLang() {
|
||||
return scriptLang;
|
||||
return script == null ? null : script.getLang();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a script parameter.
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequest addScriptParam(String name, Object value) {
|
||||
if (scriptParams == null) {
|
||||
scriptParams = Maps.newHashMap();
|
||||
Script script = script();
|
||||
if (script == null) {
|
||||
HashMap<String, Object> scriptParams = new HashMap<String, Object>();
|
||||
scriptParams.put(name, value);
|
||||
updateOrCreateScript(null, null, null, scriptParams);
|
||||
} else {
|
||||
Map<String, Object> scriptParams = script.getParams();
|
||||
if (scriptParams == null) {
|
||||
scriptParams = new HashMap<String, Object>();
|
||||
scriptParams.put(name, value);
|
||||
updateOrCreateScript(null, null, null, scriptParams);
|
||||
} else {
|
||||
scriptParams.put(name, value);
|
||||
}
|
||||
}
|
||||
scriptParams.put(name, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the script parameters to use with the script.
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequest scriptParams(Map<String, Object> scriptParams) {
|
||||
if (this.scriptParams == null) {
|
||||
this.scriptParams = scriptParams;
|
||||
} else {
|
||||
this.scriptParams.putAll(scriptParams);
|
||||
}
|
||||
updateOrCreateScript(null, null, null, scriptParams);
|
||||
return this;
|
||||
}
|
||||
|
||||
    private void updateOrCreateScript(String scriptContent, ScriptType type, String lang, Map<String, Object> params) {
        Script script = script();
        if (script == null) {
            script = new Script(scriptContent == null ? "" : scriptContent, type == null ? ScriptType.INLINE : type, lang, params);
        } else {
            String newScriptContent = scriptContent == null ? script.getScript() : scriptContent;
            ScriptType newScriptType = type == null ? script.getType() : type;
            String newScriptLang = lang == null ? script.getLang() : lang;
            Map<String, Object> newScriptParams = params == null ? script.getParams() : params;
            script = new Script(newScriptContent, newScriptType, newScriptLang, newScriptParams);
        }
        script(script);
    }

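The helper above applies a null-means-keep rule: each deprecated setter supplies only the piece it knows, and every null argument preserves what the current Script already carries. A minimal plain-Java illustration of that merge rule; the Value class and merge method are hypothetical, used only to show the semantics.

[source,java]
--------------------------------------------------
public class MergeSketch {

    static final class Value {
        final String content;
        final String lang;
        Value(String content, String lang) { this.content = content; this.lang = lang; }
    }

    // each null argument keeps whatever the current value already has
    static Value merge(Value current, String content, String lang) {
        if (current == null) {
            return new Value(content == null ? "" : content, lang);
        }
        return new Value(content == null ? current.content : content,
                         lang == null ? current.lang : lang);
    }

    public static void main(String[] args) {
        Value v = merge(null, "ctx._source.counter += 1", null); // first setter creates it
        v = merge(v, null, "groovy");                            // a later setter only adds the lang
        System.out.println(v.content + " [" + v.lang + "]");
    }
}
--------------------------------------------------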
/**
|
||||
* The script to execute. Note, make sure not to send different script each times and instead
|
||||
* use script params if possible with the same (automatically compiled) script.
|
||||
* The script to execute. Note, make sure not to send different script each
|
||||
* times and instead use script params if possible with the same
|
||||
* (automatically compiled) script.
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequest script(String script, ScriptService.ScriptType scriptType, @Nullable Map<String, Object> scriptParams) {
|
||||
this.script = script;
|
||||
this.scriptType = scriptType;
|
||||
if (this.scriptParams != null) {
|
||||
this.scriptParams.putAll(scriptParams);
|
||||
} else {
|
||||
this.scriptParams = scriptParams;
|
||||
}
|
||||
this.script = new Script(script, scriptType, null, scriptParams);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The script to execute. Note, make sure not to send different script each times and instead
|
||||
* use script params if possible with the same (automatically compiled) script.
|
||||
* The script to execute. Note, make sure not to send different script each
|
||||
* times and instead use script params if possible with the same
|
||||
* (automatically compiled) script.
|
||||
*
|
||||
* @param script The script to execute
|
||||
* @param scriptLang The script language
|
||||
* @param scriptType The script type
|
||||
* @param scriptParams The script parameters
|
||||
* @param script
|
||||
* The script to execute
|
||||
* @param scriptLang
|
||||
* The script language
|
||||
* @param scriptType
|
||||
* The script type
|
||||
* @param scriptParams
|
||||
* The script parameters
|
||||
*
|
||||
* @deprecated Use {@link #script(Script)} instead
|
||||
*/
|
||||
public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType, @Nullable Map<String, Object> scriptParams) {
|
||||
this.script = script;
|
||||
this.scriptLang = scriptLang;
|
||||
this.scriptType = scriptType;
|
||||
if (this.scriptParams != null) {
|
||||
this.scriptParams.putAll(scriptParams);
|
||||
} else {
|
||||
this.scriptParams = scriptParams;
|
||||
}
|
||||
@Deprecated
|
||||
public UpdateRequest script(String script, @Nullable String scriptLang, ScriptService.ScriptType scriptType,
|
||||
@Nullable Map<String, Object> scriptParams) {
|
||||
this.script = new Script(script, scriptType, scriptLang, scriptParams);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -574,6 +635,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
|
||||
public UpdateRequest source(BytesReference source) throws Exception {
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
Map<String, Object> scriptParams = null;
|
||||
Script script = null;
|
||||
XContentType xContentType = XContentFactory.xContentType(source);
|
||||
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
|
@ -584,6 +647,8 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if ("script".equals(currentFieldName) && token == XContentParser.Token.START_OBJECT) {
|
||||
script = Script.parse(parser);
|
||||
} else if ("params".equals(currentFieldName)) {
|
||||
scriptParams = parser.map();
|
||||
} else if ("scripted_upsert".equals(currentFieldName)) {
|
||||
|
@ -604,12 +669,16 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
scriptParameterParser.token(currentFieldName, token, parser);
|
||||
}
|
||||
}
|
||||
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
|
||||
if (scriptValue != null) {
|
||||
script = scriptValue.script();
|
||||
scriptType = scriptValue.scriptType();
|
||||
// Don't have a script using the new API so see if it is specified with the old API
|
||||
if (script == null) {
|
||||
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
|
||||
if (scriptValue != null) {
|
||||
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), scriptParams);
|
||||
}
|
||||
}
|
||||
if (script != null) {
|
||||
this.script = script;
|
||||
}
|
||||
scriptLang = scriptParameterParser.lang();
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
@ -639,12 +708,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
parent = in.readOptionalString();
|
||||
script = in.readOptionalString();
|
||||
if(Strings.hasLength(script)) {
|
||||
scriptType = ScriptService.ScriptType.readFrom(in);
|
||||
if (in.readBoolean()) {
|
||||
script = Script.readScript(in);
|
||||
}
|
||||
scriptLang = in.readOptionalString();
|
||||
scriptParams = in.readMap();
|
||||
retryOnConflict = in.readVInt();
|
||||
refresh = in.readBoolean();
|
||||
if (in.readBoolean()) {
|
||||
|
@ -677,12 +743,11 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
|
|||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
out.writeOptionalString(parent);
|
||||
out.writeOptionalString(script);
|
||||
if (Strings.hasLength(script)) {
|
||||
ScriptService.ScriptType.writeTo(scriptType, out);
|
||||
boolean hasScript = script != null;
|
||||
out.writeBoolean(hasScript);
|
||||
if (hasScript) {
|
||||
script.writeTo(out);
|
||||
}
|
||||
out.writeOptionalString(scriptLang);
|
||||
out.writeMap(scriptParams);
|
||||
out.writeVInt(retryOnConflict);
|
||||
out.writeBoolean(refresh);
|
||||
if (doc == null) {
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
||||
import java.util.Map;
|
||||
|
@@ -80,21 +81,43 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
     * The script works with the variable <code>ctx</code>, which is bound to the entry,
     * e.g. <code>ctx._source.mycounter += 1</code>.
     *
     */
    public UpdateRequestBuilder setScript(Script script) {
        request.script(script);
        return this;
    }

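A short usage sketch for the builder method added above; the index, type, id and the ctx expression are illustrative, and the single-argument Script constructor is assumed here to denote an inline script in the default language.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;

public class CounterUpdate {

    // increments a counter field on one document via the new setScript(Script) builder method
    public static UpdateResponse increment(Client client) {
        return client.prepareUpdate("orders", "order", "1")
                .setScript(new Script("ctx._source.mycounter += 1"))
                .get();
    }
}
--------------------------------------------------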
/**
|
||||
* The script to execute. Note, make sure not to send different script each
|
||||
* times and instead use script params if possible with the same
|
||||
* (automatically compiled) script.
|
||||
* <p/>
|
||||
* The script works with the variable <code>ctx</code>, which is bound to
|
||||
* the entry, e.g. <code>ctx._source.mycounter += 1</code>.
|
||||
*
|
||||
* @see #setScriptLang(String)
|
||||
* @see #setScriptParams(Map)
|
||||
*
|
||||
* @deprecated use {@link #setScript(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequestBuilder setScript(String script, ScriptService.ScriptType scriptType) {
|
||||
request.script(script, scriptType);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The language of the script to execute.
|
||||
* Valid options are: mvel, js, groovy, python, and native (Java)<br>
|
||||
* The language of the script to execute. Valid options are: mvel, js,
|
||||
* groovy, python, and native (Java)<br>
|
||||
* Default: groovy
|
||||
* <p/>
|
||||
* Ref: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-scripting.html
|
||||
* Ref:
|
||||
* http://www.elasticsearch.org/guide/en/elasticsearch/reference/current
|
||||
* /modules-scripting.html
|
||||
*
|
||||
* @deprecated use {@link #setScript(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequestBuilder setScriptLang(String scriptLang) {
|
||||
request.scriptLang(scriptLang);
|
||||
return this;
|
||||
|
@ -102,7 +125,10 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
|
|||
|
||||
/**
|
||||
* Sets the script parameters to use with the script.
|
||||
*
|
||||
* @deprecated use {@link #setScript(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequestBuilder setScriptParams(Map<String, Object> scriptParams) {
|
||||
request.scriptParams(scriptParams);
|
||||
return this;
|
||||
|
@ -110,7 +136,10 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
|
|||
|
||||
/**
|
||||
* Add a script parameter.
|
||||
*
|
||||
* @deprecated use {@link #setScript(Script)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public UpdateRequestBuilder addScriptParam(String name, Object value) {
|
||||
request.addScriptParam(name, value);
|
||||
return this;
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.ExceptionsHelper;
|
|||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.PidFile;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.cli.Terminal;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.CreationException;
|
||||
import org.elasticsearch.common.inject.spi.Message;
|
||||
|
@ -84,14 +85,6 @@ public class Bootstrap {
|
|||
/** initialize native resources */
|
||||
public static void initializeNatives(boolean mlockAll, boolean ctrlHandler, boolean loadSigar) {
|
||||
final ESLogger logger = Loggers.getLogger(Bootstrap.class);
|
||||
// mlockall if requested
|
||||
if (mlockAll) {
|
||||
if (Constants.WINDOWS) {
|
||||
Natives.tryVirtualLock();
|
||||
} else {
|
||||
Natives.tryMlockall();
|
||||
}
|
||||
}
|
||||
|
||||
// check if the user is running as root, and bail
|
||||
if (Natives.definitelyRunningAsRoot()) {
|
||||
|
@ -101,6 +94,15 @@ public class Bootstrap {
|
|||
throw new RuntimeException("don't run elasticsearch as root.");
|
||||
}
|
||||
}
|
||||
|
||||
// mlockall if requested
|
||||
if (mlockAll) {
|
||||
if (Constants.WINDOWS) {
|
||||
Natives.tryVirtualLock();
|
||||
} else {
|
||||
Natives.tryMlockall();
|
||||
}
|
||||
}
|
||||
|
||||
// listener for windows close event
|
||||
if (ctrlHandler) {
|
||||
|
@ -162,8 +164,16 @@ public class Bootstrap {
|
|||
|
||||
// install SM after natives, shutdown hooks, etc.
|
||||
setupSecurity(settings, environment);
|
||||
|
||||
NodeBuilder nodeBuilder = NodeBuilder.nodeBuilder().settings(settings).loadConfigSettings(false);
|
||||
|
||||
// We do not need to reload system properties here as we have already applied them in building the settings and
|
||||
// reloading could cause multiple prompts to the user for values if a system property was specified with a prompt
|
||||
// placeholder
|
||||
Settings nodeSettings = Settings.settingsBuilder()
|
||||
.put(settings)
|
||||
.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
|
||||
.build();
|
||||
|
||||
NodeBuilder nodeBuilder = NodeBuilder.nodeBuilder().settings(nodeSettings).loadConfigSettings(false);
|
||||
node = nodeBuilder.build();
|
||||
}
|
||||
|
||||
|
@ -194,8 +204,9 @@ public class Bootstrap {
|
|||
}
|
||||
}
|
||||
|
||||
private static Tuple<Settings, Environment> initialSettings() {
|
||||
return InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true);
|
||||
private static Tuple<Settings, Environment> initialSettings(boolean foreground) {
|
||||
Terminal terminal = foreground ? Terminal.DEFAULT : null;
|
||||
return InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true, terminal);
|
||||
}
|
||||
|
||||
private void start() {
|
||||
|
@ -226,7 +237,7 @@ public class Bootstrap {
|
|||
Settings settings = null;
|
||||
Environment environment = null;
|
||||
try {
|
||||
Tuple<Settings, Environment> tuple = initialSettings();
|
||||
Tuple<Settings, Environment> tuple = initialSettings(foreground);
|
||||
settings = tuple.v1();
|
||||
environment = tuple.v2();
|
||||
|
||||
|
|
|
@ -20,33 +20,53 @@
|
|||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import com.sun.jna.Native;
|
||||
import com.sun.jna.Structure;
|
||||
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
* java mapping to some libc functions
|
||||
*/
|
||||
class JNACLibrary {
|
||||
final class JNACLibrary {
|
||||
|
||||
private static final ESLogger logger = Loggers.getLogger(JNACLibrary.class);
|
||||
|
||||
public static final int MCL_CURRENT = 1;
|
||||
public static final int MCL_FUTURE = 2;
|
||||
|
||||
public static final int ENOMEM = 12;
|
||||
public static final int RLIMIT_MEMLOCK = Constants.MAC_OS_X ? 6 : 8;
|
||||
public static final long RLIM_INFINITY = Constants.MAC_OS_X ? 9223372036854775807L : -1L;
|
||||
|
||||
static {
|
||||
try {
|
||||
Native.register("c");
|
||||
} catch (UnsatisfiedLinkError e) {
|
||||
logger.warn("unable to link C library. native methods (mlockall) will be disabled.");
|
||||
logger.warn("unable to link C library. native methods (mlockall) will be disabled.", e);
|
||||
}
|
||||
}
|
||||
|
||||
static native int mlockall(int flags);
|
||||
|
||||
static native int geteuid();
|
||||
|
||||
/** corresponds to struct rlimit */
|
||||
public static final class Rlimit extends Structure implements Structure.ByReference {
|
||||
public long rlim_cur = 0;
|
||||
public long rlim_max = 0;
|
||||
|
||||
@Override
|
||||
protected List getFieldOrder() {
|
||||
return Arrays.asList(new String[] { "rlim_cur", "rlim_max" });
|
||||
}
|
||||
}
|
||||
|
||||
static native int getrlimit(int resource, Rlimit rlimit);
|
||||
|
||||
static native String strerror(int errno);
|
||||
|
||||
private JNACLibrary() {
|
||||
}
|
||||
|
|
|
@ -26,8 +26,6 @@ import org.elasticsearch.common.logging.ESLogger;
|
|||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
import static org.elasticsearch.bootstrap.JNAKernel32Library.SizeT;
|
||||
|
||||
/**
|
||||
|
@ -43,30 +41,66 @@ class JNANatives {
|
|||
|
||||
static void tryMlockall() {
|
||||
int errno = Integer.MIN_VALUE;
|
||||
String errMsg = null;
|
||||
boolean rlimitSuccess = false;
|
||||
long softLimit = 0;
|
||||
long hardLimit = 0;
|
||||
|
||||
try {
|
||||
int result = JNACLibrary.mlockall(JNACLibrary.MCL_CURRENT);
|
||||
if (result != 0) {
|
||||
errno = Native.getLastError();
|
||||
} else {
|
||||
if (result == 0) {
|
||||
LOCAL_MLOCKALL = true;
|
||||
return;
|
||||
}
|
||||
|
||||
errno = Native.getLastError();
|
||||
errMsg = JNACLibrary.strerror(errno);
|
||||
if (Constants.LINUX || Constants.MAC_OS_X) {
|
||||
// we only know RLIMIT_MEMLOCK for these two at the moment.
|
||||
JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
|
||||
if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_MEMLOCK, rlimit) == 0) {
|
||||
rlimitSuccess = true;
|
||||
softLimit = rlimit.rlim_cur;
|
||||
hardLimit = rlimit.rlim_max;
|
||||
} else {
|
||||
logger.warn("Unable to retrieve resource limits: " + JNACLibrary.strerror(Native.getLastError()));
|
||||
}
|
||||
}
|
||||
} catch (UnsatisfiedLinkError e) {
|
||||
// this will have already been logged by CLibrary, no need to repeat it
|
||||
return;
|
||||
}
|
||||
|
||||
if (errno != Integer.MIN_VALUE) {
|
||||
if (errno == JNACLibrary.ENOMEM && System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("linux")) {
|
||||
logger.warn("Unable to lock JVM memory (ENOMEM)."
|
||||
+ " This can result in part of the JVM being swapped out."
|
||||
+ " Increase RLIMIT_MEMLOCK (ulimit).");
|
||||
} else if (!System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("mac")) {
|
||||
// OS X allows mlockall to be called, but always returns an error
|
||||
logger.warn("Unknown mlockall error " + errno);
|
||||
// mlockall failed for some reason
|
||||
logger.warn("Unable to lock JVM Memory: error=" + errno + ",reason=" + errMsg + ". This can result in part of the JVM being swapped out.");
|
||||
if (errno == JNACLibrary.ENOMEM) {
|
||||
if (rlimitSuccess) {
|
||||
logger.warn("Increase RLIMIT_MEMLOCK, soft limit: " + rlimitToString(softLimit) + ", hard limit: " + rlimitToString(hardLimit));
|
||||
if (Constants.LINUX) {
|
||||
// give specific instructions for the linux case to make it easy
|
||||
logger.warn("These can be adjusted by modifying /etc/security/limits.conf, for example: \n" +
|
||||
"\t# allow user 'esuser' mlockall\n" +
|
||||
"\tesuser soft memlock unlimited\n" +
|
||||
"\tesuser hard memlock unlimited"
|
||||
);
|
||||
logger.warn("If you are logged in interactively, you will have to re-login for the new limits to take effect.");
|
||||
}
|
||||
} else {
|
||||
logger.warn("Increase RLIMIT_MEMLOCK (ulimit).");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static String rlimitToString(long value) {
|
||||
assert Constants.LINUX || Constants.MAC_OS_X;
|
||||
if (value == JNACLibrary.RLIM_INFINITY) {
|
||||
return "unlimited";
|
||||
} else {
|
||||
// TODO, on java 8 use Long.toUnsignedString, since thats what it is.
|
||||
return Long.toString(value);
|
||||
}
|
||||
}
|
||||
|
||||
/** Returns true if user is root, false if not, or if we don't know */
|
||||
static boolean definitelyRunningAsRoot() {
|
||||
if (Constants.WINDOWS) {
|
||||
|
|
|
@ -20,8 +20,14 @@
|
|||
package org.elasticsearch.client.transport;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.*;
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionModule;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestBuilder;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.cache.recycler.PageCacheRecycler;
|
||||
import org.elasticsearch.client.support.AbstractClient;
|
||||
import org.elasticsearch.client.support.Headers;
|
||||
|
@ -30,7 +36,6 @@ import org.elasticsearch.cluster.ClusterNameModule;
|
|||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.component.LifecycleComponent;
|
||||
import org.elasticsearch.common.compress.CompressorFactory;
|
||||
import org.elasticsearch.common.inject.Injector;
|
||||
import org.elasticsearch.common.inject.ModulesBuilder;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
|
@ -122,8 +127,6 @@ public class TransportClient extends AbstractClient {
|
|||
|
||||
Version version = Version.CURRENT;
|
||||
|
||||
CompressorFactory.configure(this.settings);
|
||||
|
||||
final ThreadPool threadPool = new ThreadPool(settings);
|
||||
|
||||
boolean success = false;
|
||||
|
|
|
@ -37,7 +37,7 @@ import org.elasticsearch.cluster.service.InternalClusterService;
|
|||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -422,7 +422,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
|
|||
builder.endObject();
|
||||
|
||||
builder.startObject("mappings");
|
||||
for (ObjectObjectCursor<String, CompressedString> cursor1 : templateMetaData.mappings()) {
|
||||
for (ObjectObjectCursor<String, CompressedXContent> cursor1 : templateMetaData.mappings()) {
|
||||
byte[] mappingSource = cursor1.value.uncompressed();
|
||||
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
|
||||
Map<String, Object> mapping = parser.map();
|
||||
|
|
|
@ -59,6 +59,10 @@ public class DiskUsage {
|
|||
return 100.0 * ((double)freeBytes / totalBytes);
|
||||
}
|
||||
|
||||
public double getUsedDiskAsPercentage() {
|
||||
return 100.0 - getFreeDiskAsPercentage();
|
||||
}
|
||||
|
||||
public long getFreeBytes() {
|
||||
return freeBytes;
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableSet;
|
|||
import org.elasticsearch.ElasticsearchGenerationException;
|
||||
import org.elasticsearch.cluster.AbstractDiffable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
|
@ -45,7 +45,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
|
||||
private final String alias;
|
||||
|
||||
private final CompressedString filter;
|
||||
private final CompressedXContent filter;
|
||||
|
||||
private final String indexRouting;
|
||||
|
||||
|
@ -53,7 +53,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
|
||||
private final Set<String> searchRoutingValues;
|
||||
|
||||
private AliasMetaData(String alias, CompressedString filter, String indexRouting, String searchRouting) {
|
||||
private AliasMetaData(String alias, CompressedXContent filter, String indexRouting, String searchRouting) {
|
||||
this.alias = alias;
|
||||
this.filter = filter;
|
||||
this.indexRouting = indexRouting;
|
||||
|
@ -77,11 +77,11 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
return alias();
|
||||
}
|
||||
|
||||
public CompressedString filter() {
|
||||
public CompressedXContent filter() {
|
||||
return filter;
|
||||
}
|
||||
|
||||
public CompressedString getFilter() {
|
||||
public CompressedXContent getFilter() {
|
||||
return filter();
|
||||
}
|
||||
|
||||
|
@ -176,9 +176,9 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
@Override
|
||||
public AliasMetaData readFrom(StreamInput in) throws IOException {
|
||||
String alias = in.readString();
|
||||
CompressedString filter = null;
|
||||
CompressedXContent filter = null;
|
||||
if (in.readBoolean()) {
|
||||
filter = CompressedString.readCompressedString(in);
|
||||
filter = CompressedXContent.readCompressedString(in);
|
||||
}
|
||||
String indexRouting = null;
|
||||
if (in.readBoolean()) {
|
||||
|
@ -195,7 +195,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
|
||||
private final String alias;
|
||||
|
||||
private CompressedString filter;
|
||||
private CompressedXContent filter;
|
||||
|
||||
private String indexRouting;
|
||||
|
||||
|
@ -217,7 +217,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
return alias;
|
||||
}
|
||||
|
||||
public Builder filter(CompressedString filter) {
|
||||
public Builder filter(CompressedXContent filter) {
|
||||
this.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
@ -244,7 +244,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
}
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().map(filter);
|
||||
this.filter = new CompressedString(builder.bytes());
|
||||
this.filter = new CompressedXContent(builder.bytes());
|
||||
return this;
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
|
||||
|
@ -324,7 +324,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
|
|||
}
|
||||
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
|
||||
if ("filter".equals(currentFieldName)) {
|
||||
builder.filter(new CompressedString(parser.binaryValue()));
|
||||
builder.filter(new CompressedXContent(parser.binaryValue()));
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if ("routing".equals(currentFieldName)) {
|
||||
|
|
|
@ -35,7 +35,7 @@ import org.elasticsearch.cluster.routing.Murmur3HashFunction;
|
|||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -874,7 +874,7 @@ public class IndexMetaData implements Diffable<IndexMetaData> {
|
|||
if ("mappings".equals(currentFieldName)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
|
||||
builder.putMapping(new MappingMetaData(new CompressedString(parser.binaryValue())));
|
||||
builder.putMapping(new MappingMetaData(new CompressedXContent(parser.binaryValue())));
|
||||
} else {
|
||||
Map<String, Object> mapping = parser.mapOrdered();
|
||||
if (mapping.size() == 1) {
|
||||
|
|
|
@ -24,7 +24,7 @@ import com.google.common.collect.Sets;
|
|||
import org.elasticsearch.cluster.AbstractDiffable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -54,13 +54,13 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
private final Settings settings;
|
||||
|
||||
// the mapping source should always include the type as top level
|
||||
private final ImmutableOpenMap<String, CompressedString> mappings;
|
||||
private final ImmutableOpenMap<String, CompressedXContent> mappings;
|
||||
|
||||
private final ImmutableOpenMap<String, AliasMetaData> aliases;
|
||||
|
||||
private final ImmutableOpenMap<String, IndexMetaData.Custom> customs;
|
||||
|
||||
public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap<String, CompressedString> mappings,
|
||||
public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap<String, CompressedXContent> mappings,
|
||||
ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, IndexMetaData.Custom> customs) {
|
||||
this.name = name;
|
||||
this.order = order;
|
||||
|
@ -103,11 +103,11 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
return settings();
|
||||
}
|
||||
|
||||
public ImmutableOpenMap<String, CompressedString> mappings() {
|
||||
public ImmutableOpenMap<String, CompressedXContent> mappings() {
|
||||
return this.mappings;
|
||||
}
|
||||
|
||||
public ImmutableOpenMap<String, CompressedString> getMappings() {
|
||||
public ImmutableOpenMap<String, CompressedXContent> getMappings() {
|
||||
return this.mappings;
|
||||
}
|
||||
|
||||
|
@ -170,7 +170,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
builder.settings(Settings.readSettingsFromStream(in));
|
||||
int mappingsSize = in.readVInt();
|
||||
for (int i = 0; i < mappingsSize; i++) {
|
||||
builder.putMapping(in.readString(), CompressedString.readCompressedString(in));
|
||||
builder.putMapping(in.readString(), CompressedXContent.readCompressedString(in));
|
||||
}
|
||||
int aliasesSize = in.readVInt();
|
||||
for (int i = 0; i < aliasesSize; i++) {
|
||||
|
@ -193,7 +193,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
out.writeString(template);
|
||||
Settings.writeSettingsToStream(settings, out);
|
||||
out.writeVInt(mappings.size());
|
||||
for (ObjectObjectCursor<String, CompressedString> cursor : mappings) {
|
||||
for (ObjectObjectCursor<String, CompressedXContent> cursor : mappings) {
|
||||
out.writeString(cursor.key);
|
||||
cursor.value.writeTo(out);
|
||||
}
|
||||
|
@ -223,7 +223,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
|
||||
private Settings settings = Settings.Builder.EMPTY_SETTINGS;
|
||||
|
||||
private final ImmutableOpenMap.Builder<String, CompressedString> mappings;
|
||||
private final ImmutableOpenMap.Builder<String, CompressedXContent> mappings;
|
||||
|
||||
private final ImmutableOpenMap.Builder<String, AliasMetaData> aliases;
|
||||
|
||||
|
@ -276,13 +276,13 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
return this;
|
||||
}
|
||||
|
||||
public Builder putMapping(String mappingType, CompressedString mappingSource) throws IOException {
|
||||
public Builder putMapping(String mappingType, CompressedXContent mappingSource) throws IOException {
|
||||
mappings.put(mappingType, mappingSource);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder putMapping(String mappingType, String mappingSource) throws IOException {
|
||||
mappings.put(mappingType, new CompressedString(mappingSource));
|
||||
mappings.put(mappingType, new CompressedXContent(mappingSource));
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -327,7 +327,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
|
||||
if (params.paramAsBoolean("reduce_mappings", false)) {
|
||||
builder.startObject("mappings");
|
||||
for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) {
|
||||
for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetaData.mappings()) {
|
||||
byte[] mappingSource = cursor.value.uncompressed();
|
||||
XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
|
||||
Map<String, Object> mapping = parser.map();
|
||||
|
@ -341,7 +341,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
builder.endObject();
|
||||
} else {
|
||||
builder.startArray("mappings");
|
||||
for (ObjectObjectCursor<String, CompressedString> cursor : indexTemplateMetaData.mappings()) {
|
||||
for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetaData.mappings()) {
|
||||
byte[] data = cursor.value.uncompressed();
|
||||
XContentParser parser = XContentFactory.xContent(data).createParser(data);
|
||||
Map<String, Object> mapping = parser.mapOrderedAndClose();
|
||||
|
|
|
@ -23,7 +23,7 @@ import org.elasticsearch.action.TimestampParsingException;
|
|||
import org.elasticsearch.cluster.AbstractDiffable;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
|
||||
|
@ -276,7 +276,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
private final String type;
|
||||
|
||||
private final CompressedString source;
|
||||
private final CompressedXContent source;
|
||||
|
||||
private Id id;
|
||||
private Routing routing;
|
||||
|
@ -294,9 +294,9 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
this.hasParentField = docMapper.parentFieldMapper().active();
|
||||
}
|
||||
|
||||
public MappingMetaData(CompressedString mapping) throws IOException {
|
||||
public MappingMetaData(CompressedXContent mapping) throws IOException {
|
||||
this.source = mapping;
|
||||
Map<String, Object> mappingMap = XContentHelper.createParser(mapping.compressed(), 0, mapping.compressed().length).mapOrderedAndClose();
|
||||
Map<String, Object> mappingMap = XContentHelper.createParser(mapping.compressedReference()).mapOrderedAndClose();
|
||||
if (mappingMap.size() != 1) {
|
||||
throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string());
|
||||
}
|
||||
|
@ -311,7 +311,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
|
||||
this.type = type;
|
||||
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
|
||||
this.source = new CompressedString(mappingBuilder.bytes());
|
||||
this.source = new CompressedXContent(mappingBuilder.bytes());
|
||||
Map<String, Object> withoutType = mapping;
|
||||
if (mapping.size() == 1 && mapping.containsKey(type)) {
|
||||
withoutType = (Map<String, Object>) mapping.get(type);
|
||||
|
@ -322,7 +322,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
private MappingMetaData() {
|
||||
this.type = "";
|
||||
try {
|
||||
this.source = new CompressedString("");
|
||||
this.source = new CompressedXContent("{}");
|
||||
} catch (IOException ex) {
|
||||
throw new IllegalStateException("Cannot create MappingMetaData prototype", ex);
|
||||
}
|
||||
|
@ -393,7 +393,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
}
|
||||
}
|
||||
|
||||
public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
|
||||
public MappingMetaData(String type, CompressedXContent source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
|
||||
this.type = type;
|
||||
this.source = source;
|
||||
this.id = id;
|
||||
|
@ -418,7 +418,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
return this.type;
|
||||
}
|
||||
|
||||
public CompressedString source() {
|
||||
public CompressedXContent source() {
|
||||
return this.source;
|
||||
}
|
||||
|
||||
|
@ -430,7 +430,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
* Converts the serialized compressed form of the mappings into a parsed map.
*/
public Map<String, Object> sourceAsMap() throws IOException {
Map<String, Object> mapping = XContentHelper.convertToMap(source.compressed(), 0, source.compressed().length, true).v2();
Map<String, Object> mapping = XContentHelper.convertToMap(source.compressedReference(), true).v2();
if (mapping.size() == 1 && mapping.containsKey(type())) {
// the type name is the root value, reduce it
mapping = (Map<String, Object>) mapping.get(type());
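For context, a minimal usage sketch (not part of this commit) of the `CompressedXContent`-based constructor and `sourceAsMap()` shown in the hunks above; the mapping body is illustrative only:

[source,java]
--------------------------------------------------
// Build MappingMetaData from a compressed JSON mapping (hypothetical mapping body).
CompressedXContent mappingSource =
        new CompressedXContent("{\"my_type\":{\"properties\":{\"height\":{\"type\":\"double\"}}}}");
MappingMetaData mappingMetaData = new MappingMetaData(mappingSource);

// sourceAsMap() decompresses the stored source and strips the root type key.
Map<String, Object> asMap = mappingMetaData.sourceAsMap();
--------------------------------------------------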
@ -599,7 +599,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
|
|||
|
||||
public MappingMetaData readFrom(StreamInput in) throws IOException {
|
||||
String type = in.readString();
|
||||
CompressedString source = CompressedString.readCompressedString(in);
|
||||
CompressedXContent source = CompressedXContent.readCompressedString(in);
|
||||
// id
|
||||
Id id = new Id(in.readBoolean() ? in.readString() : null);
|
||||
// routing
|
||||
|
|
|
@ -46,7 +46,7 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
|
@ -252,7 +252,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
// apply templates, merging the mappings into the request mapping if exists
|
||||
for (IndexTemplateMetaData template : templates) {
|
||||
templateNames.add(template.getName());
|
||||
for (ObjectObjectCursor<String, CompressedString> cursor : template.mappings()) {
|
||||
for (ObjectObjectCursor<String, CompressedXContent> cursor : template.mappings()) {
|
||||
if (mappings.containsKey(cursor.key)) {
|
||||
XContentHelper.mergeDefaults(mappings.get(cursor.key), parseMapping(cursor.value.string()));
|
||||
} else {
|
||||
|
@ -355,7 +355,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
// first, add the default mapping
|
||||
if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
|
||||
try {
|
||||
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false);
|
||||
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false);
|
||||
} catch (Exception e) {
|
||||
removalReason = "failed on parsing default mapping on index creation";
|
||||
throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e);
|
||||
|
@ -367,7 +367,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
|
|||
}
|
||||
try {
|
||||
// apply the default here, its the first time we parse it
|
||||
mapperService.merge(entry.getKey(), new CompressedString(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true);
|
||||
mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true);
|
||||
} catch (Exception e) {
|
||||
removalReason = "failed on parsing mappings on index creation";
|
||||
throw new MapperParsingException("mapping [" + entry.getKey() + "]", e);
|
||||
|
|
|
@ -19,9 +19,11 @@
|
|||
|
||||
package org.elasticsearch.cluster.metadata;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Maps;
|
||||
import com.google.common.collect.Sets;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
|
||||
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
|
||||
|
@ -32,14 +34,14 @@ import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
|
|||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.indices.IndexMissingException;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
|
@ -91,11 +93,11 @@ public class MetaDataMappingService extends AbstractComponent {
|
|||
|
||||
static class UpdateTask extends MappingTask {
|
||||
final String type;
|
||||
final CompressedString mappingSource;
|
||||
final CompressedXContent mappingSource;
|
||||
final String nodeId; // null fr unknown
|
||||
final ActionListener<ClusterStateUpdateResponse> listener;
|
||||
|
||||
UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, String nodeId, ActionListener<ClusterStateUpdateResponse> listener) {
|
||||
UpdateTask(String index, String indexUUID, String type, CompressedXContent mappingSource, String nodeId, ActionListener<ClusterStateUpdateResponse> listener) {
|
||||
super(index, indexUUID);
|
||||
this.type = type;
|
||||
this.mappingSource = mappingSource;
|
||||
|
@ -254,7 +256,7 @@ public class MetaDataMappingService extends AbstractComponent {
|
|||
UpdateTask updateTask = (UpdateTask) task;
|
||||
try {
|
||||
String type = updateTask.type;
|
||||
CompressedString mappingSource = updateTask.mappingSource;
|
||||
CompressedXContent mappingSource = updateTask.mappingSource;
|
||||
|
||||
MappingMetaData mappingMetaData = builder.mapping(type);
|
||||
if (mappingMetaData != null && mappingMetaData.source().equals(mappingSource)) {
|
||||
|
@ -376,9 +378,9 @@ public class MetaDataMappingService extends AbstractComponent {
|
|||
DocumentMapper existingMapper = indexService.mapperService().documentMapper(request.type());
|
||||
if (MapperService.DEFAULT_MAPPING.equals(request.type())) {
|
||||
// _default_ types do not go through merging, but we do test the new settings. Also don't apply the old default
|
||||
newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), false);
|
||||
newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), false);
|
||||
} else {
|
||||
newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), existingMapper == null);
|
||||
newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null);
|
||||
if (existingMapper != null) {
|
||||
// first, simulate
|
||||
MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true);
|
||||
|
@ -386,9 +388,26 @@ public class MetaDataMappingService extends AbstractComponent {
|
|||
if (mergeResult.hasConflicts()) {
|
||||
throw new MergeMappingException(mergeResult.buildConflicts());
|
||||
}
|
||||
} else {
|
||||
// TODO: can we find a better place for this validation?
|
||||
// The reason this validation is here is that the mapper service doesn't learn about
|
||||
// new types all at once , which can create a false error.
|
||||
|
||||
// For example in MapperService we can't distinguish between a create index api call
|
||||
// and a put mapping api call, so we don't which type did exist before.
|
||||
// Also the order of the mappings may be backwards.
|
||||
if (Version.indexCreated(indexService.getIndexSettings()).onOrAfter(Version.V_2_0_0) && newMapper.parentFieldMapper().active()) {
|
||||
IndexMetaData indexMetaData = currentState.metaData().index(index);
|
||||
for (ObjectCursor<MappingMetaData> mapping : indexMetaData.mappings().values()) {
|
||||
if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) {
|
||||
throw new IllegalArgumentException("can't add a _parent field that points to an already existing type");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
newMappers.put(index, newMapper);
|
||||
if (existingMapper != null) {
|
||||
existingMappers.put(index, existingMapper);
|
||||
|
@ -415,12 +434,12 @@ public class MetaDataMappingService extends AbstractComponent {
|
|||
continue;
|
||||
}
|
||||
|
||||
CompressedString existingSource = null;
|
||||
CompressedXContent existingSource = null;
|
||||
if (existingMappers.containsKey(entry.getKey())) {
|
||||
existingSource = existingMappers.get(entry.getKey()).mappingSource();
|
||||
}
|
||||
DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false);
|
||||
CompressedString updatedSource = mergedMapper.mappingSource();
|
||||
CompressedXContent updatedSource = mergedMapper.mappingSource();
|
||||
|
||||
if (existingSource != null) {
|
||||
if (existingSource.equals(updatedSource)) {
|
||||
|
|
|
@ -202,9 +202,7 @@ public class RepositoriesMetaData extends AbstractDiffable<Custom> implements Me
|
|||
builder.startObject(repository.name(), XContentBuilder.FieldCaseConversion.NONE);
|
||||
builder.field("type", repository.type());
|
||||
builder.startObject("settings");
|
||||
for (Map.Entry<String, String> settingEntry : repository.settings().getAsMap().entrySet()) {
|
||||
builder.field(settingEntry.getKey(), settingEntry.getValue());
|
||||
}
|
||||
repository.settings().toXContent(builder, params);
|
||||
builder.endObject();
|
||||
|
||||
builder.endObject();
|
||||
|
|
|
@ -142,20 +142,20 @@ public class DiskThresholdDecider extends AllocationDecider {
|
|||
private void warnAboutDiskIfNeeded(DiskUsage usage) {
|
||||
// Check absolute disk values
|
||||
if (usage.getFreeBytes() < DiskThresholdDecider.this.freeBytesThresholdHigh.bytes()) {
|
||||
logger.warn("high disk watermark [{} free] exceeded on {}, shards will be relocated away from this node",
|
||||
logger.warn("high disk watermark [{}] exceeded on {}, shards will be relocated away from this node",
|
||||
DiskThresholdDecider.this.freeBytesThresholdHigh, usage);
|
||||
} else if (usage.getFreeBytes() < DiskThresholdDecider.this.freeBytesThresholdLow.bytes()) {
|
||||
logger.info("low disk watermark [{} free] exceeded on {}, replicas will not be assigned to this node",
|
||||
logger.info("low disk watermark [{}] exceeded on {}, replicas will not be assigned to this node",
|
||||
DiskThresholdDecider.this.freeBytesThresholdLow, usage);
|
||||
}
|
||||
|
||||
// Check percentage disk values
|
||||
if (usage.getFreeDiskAsPercentage() < DiskThresholdDecider.this.freeDiskThresholdHigh) {
|
||||
logger.warn("high disk watermark [{} free] exceeded on {}, shards will be relocated away from this node",
|
||||
Strings.format1Decimals(DiskThresholdDecider.this.freeDiskThresholdHigh, "%"), usage);
|
||||
logger.warn("high disk watermark [{}] exceeded on {}, shards will be relocated away from this node",
|
||||
Strings.format1Decimals(100.0 - DiskThresholdDecider.this.freeDiskThresholdHigh, "%"), usage);
|
||||
} else if (usage.getFreeDiskAsPercentage() < DiskThresholdDecider.this.freeDiskThresholdLow) {
|
||||
logger.info("low disk watermark [{} free] exceeded on {}, replicas will not be assigned to this node",
|
||||
Strings.format1Decimals(DiskThresholdDecider.this.freeDiskThresholdLow, "%"), usage);
|
||||
logger.info("low disk watermark [{}] exceeded on {}, replicas will not be assigned to this node",
|
||||
Strings.format1Decimals(100.0 - DiskThresholdDecider.this.freeDiskThresholdLow, "%"), usage);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -234,6 +234,16 @@ public class DiskThresholdDecider extends AllocationDecider {
return freeDiskThresholdHigh;
}

// For Testing
public Double getUsedDiskThresholdLow() {
return 100.0 - freeDiskThresholdLow;
}

// For Testing
public Double getUsedDiskThresholdHigh() {
return 100.0 - freeDiskThresholdHigh;
}

// For Testing
public ByteSizeValue getFreeBytesThresholdLow() {
return freeBytesThresholdLow;
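The logging changes in this file report the watermark as a used-disk percentage rather than a free-disk percentage; the conversion is simply `100 - free`, as the testing getters above show. A tiny illustration with made-up numbers:

[source,java]
--------------------------------------------------
// Hypothetical values, only to show the free -> used conversion used above.
double freeDiskThresholdHigh = 10.0;                           // watermark requires 10% free
double usedDiskThresholdHigh = 100.0 - freeDiskThresholdHigh;  // reported as 90% used
// Old message: "high disk watermark [10.0% free] exceeded ..."
// New message: "high disk watermark [90.0%] exceeded ..."
--------------------------------------------------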
@ -285,6 +295,8 @@ public class DiskThresholdDecider extends AllocationDecider {
|
|||
|
||||
@Override
|
||||
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
|
||||
double usedDiskThresholdLow = 100.0 - DiskThresholdDecider.this.freeDiskThresholdLow;
|
||||
double usedDiskThresholdHigh = 100.0 - DiskThresholdDecider.this.freeDiskThresholdHigh;
|
||||
|
||||
// Always allow allocation if the decider is disabled
|
||||
if (!enabled) {
|
||||
|
@ -342,9 +354,11 @@ public class DiskThresholdDecider extends AllocationDecider {
|
|||
|
||||
// First, check that the node currently over the low watermark
|
||||
double freeDiskPercentage = usage.getFreeDiskAsPercentage();
|
||||
// Cache the used disk percentage for displaying disk percentages consistent with documentation
|
||||
double usedDiskPercentage = usage.getUsedDiskAsPercentage();
|
||||
long freeBytes = usage.getFreeBytes();
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("Node [{}] has {}% free disk", node.nodeId(), freeDiskPercentage);
|
||||
logger.trace("Node [{}] has {}% used disk", node.nodeId(), usedDiskPercentage);
|
||||
}
|
||||
|
||||
// a flag for whether the primary shard has been previously allocated
|
||||
|
@ -387,20 +401,20 @@ public class DiskThresholdDecider extends AllocationDecider {
|
|||
// If the shard is a replica or has a primary that has already been allocated before, check the low threshold
|
||||
if (!shardRouting.primary() || (shardRouting.primary() && primaryHasBeenAllocated)) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Less than the required {} free disk threshold ({} free) on node [{}], preventing allocation",
|
||||
Strings.format1Decimals(freeDiskThresholdLow, "%"),
|
||||
Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId());
|
||||
logger.debug("More than the allowed {} used disk threshold ({} used) on node [{}], preventing allocation",
|
||||
Strings.format1Decimals(usedDiskThresholdLow, "%"),
|
||||
Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId());
|
||||
}
|
||||
return allocation.decision(Decision.NO, NAME, "less than required [%s%%] free disk on node, free: [%s%%]",
|
||||
freeDiskThresholdLow, freeDiskPercentage);
|
||||
return allocation.decision(Decision.NO, NAME, "more than allowed [%s%%] used disk on node, free: [%s%%]",
|
||||
usedDiskThresholdLow, freeDiskPercentage);
|
||||
} else if (freeDiskPercentage > freeDiskThresholdHigh) {
|
||||
// Allow the shard to be allocated because it is primary that
|
||||
// has never been allocated if it's under the high watermark
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Less than the required {} free disk threshold ({} free) on node [{}], " +
|
||||
logger.debug("More than the allowed {} used disk threshold ({} used) on node [{}], " +
|
||||
"but allowing allocation because primary has never been allocated",
|
||||
Strings.format1Decimals(freeDiskThresholdLow, "%"),
|
||||
Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId());
|
||||
Strings.format1Decimals(usedDiskThresholdLow, "%"),
|
||||
Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId());
|
||||
}
|
||||
return allocation.decision(Decision.YES, NAME, "primary has never been allocated before");
|
||||
} else {
|
||||
|
@ -412,8 +426,8 @@ public class DiskThresholdDecider extends AllocationDecider {
|
|||
Strings.format1Decimals(freeDiskThresholdHigh, "%"),
|
||||
Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId());
|
||||
}
|
||||
return allocation.decision(Decision.NO, NAME, "less than required [%s%%] free disk on node, free: [%s%%]",
|
||||
freeDiskThresholdLow, freeDiskPercentage);
|
||||
return allocation.decision(Decision.NO, NAME, "more than allowed [%s%%] used disk on node, free: [%s%%]",
|
||||
usedDiskThresholdHigh, freeDiskPercentage);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -429,10 +443,10 @@ public class DiskThresholdDecider extends AllocationDecider {
|
|||
freeBytesThresholdLow, new ByteSizeValue(freeBytesAfterShard));
|
||||
}
|
||||
if (freeSpaceAfterShard < freeDiskThresholdHigh) {
|
||||
logger.warn("After allocating, node [{}] would have less than the required {} free disk threshold ({} free), preventing allocation",
|
||||
logger.warn("After allocating, node [{}] would have more than the allowed {} free disk threshold ({} free), preventing allocation",
|
||||
node.nodeId(), Strings.format1Decimals(freeDiskThresholdHigh, "%"), Strings.format1Decimals(freeSpaceAfterShard, "%"));
|
||||
return allocation.decision(Decision.NO, NAME, "after allocation less than required [%s%%] free disk on node, free: [%s%%]",
|
||||
freeDiskThresholdLow, freeSpaceAfterShard);
|
||||
return allocation.decision(Decision.NO, NAME, "after allocation more than allowed [%s%%] used disk on node, free: [%s%%]",
|
||||
usedDiskThresholdLow, freeSpaceAfterShard);
|
||||
}
|
||||
|
||||
return allocation.decision(Decision.YES, NAME, "enough disk for shard on node, free: [%s]", new ByteSizeValue(freeBytes));
|
||||
|
|
|
@ -352,6 +352,7 @@ public class PagedBytesReference implements BytesReference {
|
|||
private final int offset;
|
||||
private final int length;
|
||||
private int pos;
|
||||
private int mark;
|
||||
|
||||
public PagedBytesReferenceStreamInput(ByteArray bytearray, int offset, int length) {
|
||||
this.bytearray = bytearray;
|
||||
|
@ -420,9 +421,19 @@ public class PagedBytesReference implements BytesReference {
return copiedBytes;
}

@Override
public boolean markSupported() {
return true;
}

@Override
public void mark(int readlimit) {
this.mark = pos;
}

@Override
public void reset() throws IOException {
pos = 0;
pos = mark;
}

@Override
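This stream (and `InputStreamStreamInput`, changed further down) now honours the standard `java.io` mark/reset contract: `reset()` rewinds to the last `mark()`, not to the start of the stream. A self-contained JDK-only illustration of that contract, for reference:

[source,java]
--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class MarkResetDemo {
    public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream(new byte[]{1, 2, 3, 4});
        in.read();           // consume 1
        in.mark(16);         // remember the position before 2
        in.read();           // consume 2
        in.read();           // consume 3
        in.reset();          // rewind to the mark, not to the start
        System.out.println(in.read()); // prints 2 again
    }
}
--------------------------------------------------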
|
@ -93,7 +93,7 @@ public abstract class CliTool {
|
|||
Preconditions.checkArgument(config.cmds().size() != 0, "At least one command must be configured");
|
||||
this.config = config;
|
||||
this.terminal = terminal;
|
||||
Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true);
|
||||
Tuple<Settings, Environment> tuple = InternalSettingsPreparer.prepareSettings(EMPTY_SETTINGS, true, terminal);
|
||||
settings = tuple.v1();
|
||||
env = tuple.v2();
|
||||
}
|
||||
|
|
|
@ -30,10 +30,9 @@ import java.io.IOException;
|
|||
* @deprecated Used only for backward comp. to read old compressed files, since we now use codec based compression
|
||||
*/
|
||||
@Deprecated
|
||||
public abstract class CompressedIndexInput<T extends CompressorContext> extends IndexInput {
|
||||
public abstract class CompressedIndexInput extends IndexInput {
|
||||
|
||||
private IndexInput in;
|
||||
protected final T context;
|
||||
|
||||
private int version;
|
||||
private long totalUncompressedLength;
|
||||
|
@ -48,10 +47,9 @@ public abstract class CompressedIndexInput<T extends CompressorContext> extends
|
|||
private int currentOffsetIdx;
|
||||
private long currentUncompressedChunkPointer;
|
||||
|
||||
public CompressedIndexInput(IndexInput in, T context) throws IOException {
|
||||
public CompressedIndexInput(IndexInput in) throws IOException {
|
||||
super("compressed(" + in.toString() + ")");
|
||||
this.in = in;
|
||||
this.context = context;
|
||||
readHeader(in);
|
||||
this.version = in.readInt();
|
||||
long metaDataPosition = in.readLong();
|
||||
|
|
|
@ -27,10 +27,9 @@ import java.io.IOException;
|
|||
|
||||
/**
|
||||
*/
|
||||
public abstract class CompressedStreamInput<T extends CompressorContext> extends StreamInput {
|
||||
public abstract class CompressedStreamInput extends StreamInput {
|
||||
|
||||
private final StreamInput in;
|
||||
protected final CompressorContext context;
|
||||
|
||||
private boolean closed;
|
||||
|
||||
|
@ -38,9 +37,8 @@ public abstract class CompressedStreamInput<T extends CompressorContext> extends
|
|||
private int position = 0;
|
||||
private int valid = 0;
|
||||
|
||||
public CompressedStreamInput(StreamInput in, T context) throws IOException {
|
||||
public CompressedStreamInput(StreamInput in) throws IOException {
|
||||
this.in = in;
|
||||
this.context = context;
|
||||
super.setVersion(in.getVersion());
|
||||
readHeader(in);
|
||||
}
|
||||
|
@ -51,13 +49,6 @@ public abstract class CompressedStreamInput<T extends CompressorContext> extends
|
|||
return super.setVersion(version);
|
||||
}
|
||||
|
||||
/**
|
||||
* Expert!, resets to buffer start, without the need to decompress it again.
|
||||
*/
|
||||
public void resetToBufferStart() {
|
||||
this.position = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Method is overridden to report number of bytes that can now be read
|
||||
* from decoded data buffer, without reading bytes from the underlying
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.common.compress;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
||||
|
@ -34,33 +35,32 @@ import java.util.Arrays;
|
|||
* memory. Note that the compressed string might still sometimes need to be
|
||||
* decompressed in order to perform equality checks or to compute hash codes.
|
||||
*/
|
||||
public final class CompressedString {
|
||||
public final class CompressedXContent {
|
||||
|
||||
private final byte[] bytes;
|
||||
private int hashCode;
|
||||
|
||||
public CompressedString(BytesReference data) throws IOException {
|
||||
public CompressedXContent(BytesReference data) throws IOException {
|
||||
Compressor compressor = CompressorFactory.compressor(data);
|
||||
if (compressor != null) {
|
||||
// already compressed...
|
||||
this.bytes = data.toBytes();
|
||||
} else {
|
||||
BytesArray bytesArray = data.toBytesArray();
|
||||
this.bytes = CompressorFactory.defaultCompressor().compress(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length());
|
||||
assert CompressorFactory.compressor(bytes) != null;
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
try (StreamOutput compressedOutput = CompressorFactory.defaultCompressor().streamOutput(out)) {
|
||||
data.writeTo(compressedOutput);
|
||||
}
|
||||
this.bytes = out.bytes().toBytes();
|
||||
assert CompressorFactory.compressor(new BytesArray(bytes)) != null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public CompressedString(byte[] data, int offset, int length) throws IOException {
|
||||
this(new BytesArray(data, offset, length));
|
||||
public CompressedXContent(byte[] data) throws IOException {
|
||||
this(new BytesArray(data));
|
||||
}
|
||||
|
||||
public CompressedString(byte[] data) throws IOException {
|
||||
this(data, 0, data.length);
|
||||
}
|
||||
|
||||
public CompressedString(String str) throws IOException {
|
||||
public CompressedXContent(String str) throws IOException {
|
||||
this(new BytesArray(new BytesRef(str)));
|
||||
}
|
||||
|
||||
|
@ -69,12 +69,15 @@ public final class CompressedString {
return this.bytes;
}

/** Return the compressed bytes as a {@link BytesReference}. */
public BytesReference compressedReference() {
return new BytesArray(bytes);
}

/** Return the uncompressed bytes. */
public byte[] uncompressed() {
Compressor compressor = CompressorFactory.compressor(bytes);
assert compressor != null;
try {
return compressor.uncompress(bytes, 0, bytes.length);
return CompressorFactory.uncompress(new BytesArray(bytes)).toBytes();
} catch (IOException e) {
throw new IllegalStateException("Cannot decompress compressed string", e);
}
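A minimal round-trip sketch (fragment, not from this commit) using only the constructors and accessors visible in this diff:

[source,java]
--------------------------------------------------
// Construction compresses the given JSON with the default compressor.
CompressedXContent source = new CompressedXContent("{\"foo\":\"bar\"}");

byte[] compressed = source.compressed();                  // compressed bytes
BytesReference compressedRef = source.compressedReference();
byte[] raw = source.uncompressed();                       // decompressed bytes
String json = source.string();                            // back to "{\"foo\":\"bar\"}"
--------------------------------------------------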
@ -84,10 +87,10 @@ public final class CompressedString {
|
|||
return new BytesRef(uncompressed()).utf8ToString();
|
||||
}
|
||||
|
||||
public static CompressedString readCompressedString(StreamInput in) throws IOException {
|
||||
public static CompressedXContent readCompressedString(StreamInput in) throws IOException {
|
||||
byte[] bytes = new byte[in.readVInt()];
|
||||
in.readBytes(bytes, 0, bytes.length);
|
||||
return new CompressedString(bytes);
|
||||
return new CompressedXContent(bytes);
|
||||
}
|
||||
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
|
@ -100,7 +103,7 @@ public final class CompressedString {
|
|||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
CompressedString that = (CompressedString) o;
|
||||
CompressedXContent that = (CompressedXContent) o;
|
||||
|
||||
if (Arrays.equals(compressed(), that.compressed())) {
|
||||
return true;
|
|
@ -23,7 +23,6 @@ import org.apache.lucene.store.IndexInput;
|
|||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.jboss.netty.buffer.ChannelBuffer;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -32,32 +31,20 @@ import java.io.IOException;
|
|||
*/
|
||||
public interface Compressor {
|
||||
|
||||
String type();
|
||||
|
||||
void configure(Settings settings);
|
||||
|
||||
boolean isCompressed(BytesReference bytes);
|
||||
|
||||
boolean isCompressed(byte[] data, int offset, int length);
|
||||
|
||||
boolean isCompressed(ChannelBuffer buffer);
|
||||
|
||||
StreamInput streamInput(StreamInput in) throws IOException;
|
||||
|
||||
StreamOutput streamOutput(StreamOutput out) throws IOException;
|
||||
|
||||
/**
|
||||
* @deprecated Used for backward comp. since we now use Lucene compressed codec.
|
||||
*/
|
||||
@Deprecated
|
||||
boolean isCompressed(IndexInput in) throws IOException;
|
||||
|
||||
/**
|
||||
* Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
|
||||
*/
|
||||
byte[] uncompress(byte[] data, int offset, int length) throws IOException;
|
||||
|
||||
/**
|
||||
* Compresses the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
|
||||
*/
|
||||
byte[] compress(byte[] data, int offset, int length) throws IOException;
|
||||
|
||||
CompressedStreamInput streamInput(StreamInput in) throws IOException;
|
||||
|
||||
CompressedStreamOutput streamOutput(StreamOutput out) throws IOException;
|
||||
|
||||
/**
|
||||
* @deprecated Used for backward comp. since we now use Lucene compressed codec.
|
||||
*/
|
||||
|
|
|
@ -19,68 +19,36 @@
|
|||
|
||||
package org.elasticsearch.common.compress;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.compress.deflate.DeflateCompressor;
|
||||
import org.elasticsearch.common.compress.lzf.LZFCompressor;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.jboss.netty.buffer.ChannelBuffer;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class CompressorFactory {
|
||||
|
||||
private static final LZFCompressor LZF = new LZFCompressor();
|
||||
|
||||
private static final Compressor[] compressors;
|
||||
private static final ImmutableMap<String, Compressor> compressorsByType;
|
||||
private static Compressor defaultCompressor;
|
||||
private static volatile Compressor defaultCompressor;
|
||||
|
||||
static {
|
||||
List<Compressor> compressorsX = Lists.newArrayList();
|
||||
compressorsX.add(LZF);
|
||||
|
||||
compressors = compressorsX.toArray(new Compressor[compressorsX.size()]);
|
||||
MapBuilder<String, Compressor> compressorsByTypeX = MapBuilder.newMapBuilder();
|
||||
for (Compressor compressor : compressors) {
|
||||
compressorsByTypeX.put(compressor.type(), compressor);
|
||||
}
|
||||
compressorsByType = compressorsByTypeX.immutableMap();
|
||||
|
||||
defaultCompressor = LZF;
|
||||
compressors = new Compressor[] {
|
||||
new LZFCompressor(),
|
||||
new DeflateCompressor()
|
||||
};
|
||||
defaultCompressor = new DeflateCompressor();
|
||||
}
|
||||
|
||||
public static synchronized void configure(Settings settings) {
|
||||
for (Compressor compressor : compressors) {
|
||||
compressor.configure(settings);
|
||||
}
|
||||
String defaultType = settings.get("compress.default.type", "lzf").toLowerCase(Locale.ENGLISH);
|
||||
boolean found = false;
|
||||
for (Compressor compressor : compressors) {
|
||||
if (defaultType.equalsIgnoreCase(compressor.type())) {
|
||||
defaultCompressor = compressor;
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
Loggers.getLogger(CompressorFactory.class).warn("failed to find default type [{}]", defaultType);
|
||||
}
|
||||
}
|
||||
|
||||
public static synchronized void setDefaultCompressor(Compressor defaultCompressor) {
|
||||
public static void setDefaultCompressor(Compressor defaultCompressor) {
|
||||
CompressorFactory.defaultCompressor = defaultCompressor;
|
||||
}
|
||||
|
||||
|
@ -92,14 +60,10 @@ public class CompressorFactory {
|
|||
return compressor(bytes) != null;
|
||||
}
|
||||
|
||||
public static boolean isCompressed(byte[] data) {
|
||||
return compressor(data, 0, data.length) != null;
|
||||
}
|
||||
|
||||
public static boolean isCompressed(byte[] data, int offset, int length) {
|
||||
return compressor(data, offset, length) != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated we don't compress lucene indexes anymore and rely on lucene codecs
|
||||
*/
|
||||
@Deprecated
|
||||
public static boolean isCompressed(IndexInput in) throws IOException {
|
||||
return compressor(in) != null;
|
||||
}
|
||||
|
@ -108,37 +72,35 @@ public class CompressorFactory {
|
|||
public static Compressor compressor(BytesReference bytes) {
|
||||
for (Compressor compressor : compressors) {
|
||||
if (compressor.isCompressed(bytes)) {
|
||||
// bytes should be either detected as compressed or as xcontent,
|
||||
// if we have bytes that can be either detected as compressed or
|
||||
// as a xcontent, we have a problem
|
||||
assert XContentFactory.xContentType(bytes) == null;
|
||||
return compressor;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public static Compressor compressor(byte[] data) {
|
||||
return compressor(data, 0, data.length);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public static Compressor compressor(byte[] data, int offset, int length) {
|
||||
for (Compressor compressor : compressors) {
|
||||
if (compressor.isCompressed(data, offset, length)) {
|
||||
return compressor;
|
||||
}
|
||||
XContentType contentType = XContentFactory.xContentType(bytes);
|
||||
if (contentType == null) {
|
||||
throw new NotXContentException("Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes");
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public static Compressor compressor(ChannelBuffer buffer) {
|
||||
for (Compressor compressor : compressors) {
|
||||
if (compressor.isCompressed(buffer)) {
|
||||
return compressor;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
throw new NotCompressedException();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated we don't compress lucene indexes anymore and rely on lucene codecs
|
||||
*/
|
||||
@Deprecated
|
||||
@Nullable
|
||||
public static Compressor compressor(IndexInput in) throws IOException {
|
||||
for (Compressor compressor : compressors) {
|
||||
|
@ -149,25 +111,35 @@ public class CompressorFactory {
return null;
}

public static Compressor compressor(String type) {
return compressorsByType.get(type);
}

/**
* Uncompress the provided data, data can be detected as compressed using {@link #isCompressed(byte[], int, int)}.
*/
public static BytesReference uncompressIfNeeded(BytesReference bytes) throws IOException {
Compressor compressor = compressor(bytes);
BytesReference uncompressed;
if (compressor != null) {
if (bytes.hasArray()) {
return new BytesArray(compressor.uncompress(bytes.array(), bytes.arrayOffset(), bytes.length()));
}
StreamInput compressed = compressor.streamInput(bytes.streamInput());
BytesStreamOutput bStream = new BytesStreamOutput();
Streams.copy(compressed, bStream);
compressed.close();
return bStream.bytes();
uncompressed = uncompress(bytes, compressor);
} else {
uncompressed = bytes;
}
return bytes;

return uncompressed;
}

/** Decompress the provided {@link BytesReference}. */
public static BytesReference uncompress(BytesReference bytes) throws IOException {
Compressor compressor = compressor(bytes);
if (compressor == null) {
throw new NotCompressedException();
}
return uncompress(bytes, compressor);
}

private static BytesReference uncompress(BytesReference bytes, Compressor compressor) throws IOException {
StreamInput compressed = compressor.streamInput(bytes.streamInput());
BytesStreamOutput bStream = new BytesStreamOutput();
Streams.copy(compressed, bStream);
compressed.close();
return bStream.bytes();
}
}
||||
|
|
|
@ -19,7 +19,13 @@
|
|||
|
||||
package org.elasticsearch.common.compress;
|
||||
|
||||
/**
|
||||
*/
|
||||
public interface CompressorContext {
|
||||
/** Exception indicating that we were expecting something compressed, which
|
||||
* was not compressed or corrupted so that the compression format could not
|
||||
* be detected. */
|
||||
public class NotCompressedException extends RuntimeException {
|
||||
|
||||
public NotCompressedException() {
|
||||
super();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.compress;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContent;
|
||||
|
||||
/** Exception indicating that we were expecting some {@link XContent} but could
|
||||
* not detect its type. */
|
||||
public class NotXContentException extends RuntimeException {
|
||||
|
||||
public NotXContentException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,156 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.compress.deflate;
|
||||
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressedIndexInput;
|
||||
import org.elasticsearch.common.compress.Compressor;
|
||||
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
|
||||
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.jboss.netty.buffer.ChannelBuffer;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.BufferedOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.zip.Deflater;
|
||||
import java.util.zip.DeflaterOutputStream;
|
||||
import java.util.zip.Inflater;
|
||||
import java.util.zip.InflaterInputStream;
|
||||
|
||||
/**
|
||||
* {@link Compressor} implementation based on the DEFLATE compression algorithm.
|
||||
*/
|
||||
public class DeflateCompressor implements Compressor {
|
||||
|
||||
// An arbitrary header that we use to identify compressed streams
|
||||
// It needs to be different from other compressors and to not be specific
|
||||
// enough so that no stream starting with these bytes could be detected as
|
||||
// a XContent
|
||||
private static final byte[] HEADER = new byte[] { 'D', 'F', 'L', '\0' };
|
||||
// 3 is a good trade-off between speed and compression ratio
|
||||
private static final int LEVEL = 3;
|
||||
// We use buffering on the input and ouput of in/def-laters in order to
|
||||
// limit the number of JNI calls
|
||||
private static final int BUFFER_SIZE = 4096;
|
||||
|
||||
@Override
|
||||
public boolean isCompressed(BytesReference bytes) {
|
||||
if (bytes.length() < HEADER.length) {
|
||||
return false;
|
||||
}
|
||||
for (int i = 0; i < HEADER.length; ++i) {
|
||||
if (bytes.get(i) != HEADER[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCompressed(ChannelBuffer buffer) {
|
||||
if (buffer.readableBytes() < HEADER.length) {
|
||||
return false;
|
||||
}
|
||||
final int offset = buffer.readerIndex();
|
||||
for (int i = 0; i < HEADER.length; ++i) {
|
||||
if (buffer.getByte(offset + i) != HEADER[i]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamInput streamInput(StreamInput in) throws IOException {
|
||||
final byte[] headerBytes = new byte[HEADER.length];
|
||||
int len = 0;
|
||||
while (len < headerBytes.length) {
|
||||
final int read = in.read(headerBytes, len, headerBytes.length - len);
|
||||
if (read == -1) {
|
||||
break;
|
||||
}
|
||||
len += read;
|
||||
}
|
||||
if (len != HEADER.length || Arrays.equals(headerBytes, HEADER) == false) {
|
||||
throw new IllegalArgumentException("Input stream is not compressed with DEFLATE!");
|
||||
}
|
||||
|
||||
final boolean nowrap = true;
|
||||
final Inflater inflater = new Inflater(nowrap);
|
||||
InputStream decompressedIn = new InflaterInputStream(in, inflater, BUFFER_SIZE);
|
||||
decompressedIn = new BufferedInputStream(decompressedIn, BUFFER_SIZE);
|
||||
return new InputStreamStreamInput(decompressedIn) {
|
||||
private boolean closed = false;
|
||||
|
||||
public void close() throws IOException {
|
||||
try {
|
||||
super.close();
|
||||
} finally {
|
||||
if (closed == false) {
|
||||
// important to release native memory
|
||||
inflater.end();
|
||||
closed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamOutput streamOutput(StreamOutput out) throws IOException {
|
||||
out.writeBytes(HEADER);
|
||||
final boolean nowrap = true;
|
||||
final Deflater deflater = new Deflater(LEVEL, nowrap);
|
||||
final boolean syncFlush = true;
|
||||
OutputStream compressedOut = new DeflaterOutputStream(out, deflater, BUFFER_SIZE, syncFlush);
|
||||
compressedOut = new BufferedOutputStream(compressedOut, BUFFER_SIZE);
|
||||
return new OutputStreamStreamOutput(compressedOut) {
|
||||
private boolean closed = false;
|
||||
|
||||
public void close() throws IOException {
|
||||
try {
|
||||
super.close();
|
||||
} finally {
|
||||
if (closed == false) {
|
||||
// important to release native memory
|
||||
deflater.end();
|
||||
closed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
public boolean isCompressed(IndexInput in) throws IOException {
return false;
}

@Override
public CompressedIndexInput indexInput(IndexInput in) throws IOException {
throw new UnsupportedOperationException();
}
}
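A minimal round-trip sketch (fragment, not from this commit) for the new `DeflateCompressor`, using only the `streamInput`/`streamOutput` API and the helpers that appear elsewhere in this change:

[source,java]
--------------------------------------------------
Compressor compressor = new DeflateCompressor();

// Compress: streamOutput() writes the 'D','F','L','\0' header, then deflates.
BytesStreamOutput out = new BytesStreamOutput();
try (StreamOutput compressedOut = compressor.streamOutput(out)) {
    compressedOut.writeBytes(new byte[]{1, 2, 3, 4});
}
BytesReference compressed = out.bytes();
assert compressor.isCompressed(compressed);   // the header makes it detectable

// Decompress by wrapping a stream over the compressed bytes.
StreamInput compressedIn = compressor.streamInput(compressed.streamInput());
BytesStreamOutput restored = new BytesStreamOutput();
Streams.copy(compressedIn, restored);
compressedIn.close();
--------------------------------------------------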
|
@ -32,14 +32,14 @@ import java.util.Arrays;
|
|||
/**
|
||||
*/
|
||||
@Deprecated
|
||||
public class LZFCompressedIndexInput extends CompressedIndexInput<LZFCompressorContext> {
|
||||
public class LZFCompressedIndexInput extends CompressedIndexInput {
|
||||
|
||||
private final ChunkDecoder decoder;
|
||||
// scratch area buffer
|
||||
private byte[] inputBuffer;
|
||||
|
||||
public LZFCompressedIndexInput(IndexInput in, ChunkDecoder decoder) throws IOException {
|
||||
super(in, LZFCompressorContext.INSTANCE);
|
||||
super(in);
|
||||
|
||||
this.decoder = decoder;
|
||||
this.uncompressed = new byte[LZFChunk.MAX_CHUNK_LEN];
|
||||
|
|
|
@ -29,7 +29,7 @@ import java.io.IOException;
|
|||
|
||||
/**
|
||||
*/
|
||||
public class LZFCompressedStreamInput extends CompressedStreamInput<LZFCompressorContext> {
|
||||
public class LZFCompressedStreamInput extends CompressedStreamInput {
|
||||
|
||||
private final BufferRecycler recycler;
|
||||
|
||||
|
@ -39,7 +39,7 @@ public class LZFCompressedStreamInput extends CompressedStreamInput<LZFCompresso
|
|||
private byte[] inputBuffer;
|
||||
|
||||
public LZFCompressedStreamInput(StreamInput in, ChunkDecoder decoder) throws IOException {
|
||||
super(in, LZFCompressorContext.INSTANCE);
|
||||
super(in);
|
||||
this.recycler = BufferRecycler.instance();
|
||||
this.decoder = decoder;
|
||||
|
||||
|
|
|
@ -21,30 +21,27 @@ package org.elasticsearch.common.compress.lzf;
|
|||
|
||||
import com.ning.compress.lzf.ChunkDecoder;
|
||||
import com.ning.compress.lzf.LZFChunk;
|
||||
import com.ning.compress.lzf.LZFEncoder;
|
||||
import com.ning.compress.lzf.util.ChunkDecoderFactory;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressedIndexInput;
|
||||
import org.elasticsearch.common.compress.CompressedStreamInput;
|
||||
import org.elasticsearch.common.compress.CompressedStreamOutput;
|
||||
import org.elasticsearch.common.compress.Compressor;
|
||||
import org.elasticsearch.common.compress.deflate.DeflateCompressor;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.jboss.netty.buffer.ChannelBuffer;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link DeflateCompressor} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public class LZFCompressor implements Compressor {
|
||||
|
||||
static final byte[] LUCENE_HEADER = {'L', 'Z', 'F', 0};
|
||||
|
||||
public static final String TYPE = "lzf";
|
||||
|
||||
private ChunkDecoder decoder;
|
||||
|
||||
public LZFCompressor() {
|
||||
|
@ -53,14 +50,6 @@ public class LZFCompressor implements Compressor {
|
|||
this.decoder.getClass().getSimpleName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String type() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Settings settings) {}
|
||||
|
||||
@Override
|
||||
public boolean isCompressed(BytesReference bytes) {
|
||||
return bytes.length() >= 3 &&
|
||||
|
@ -69,14 +58,6 @@ public class LZFCompressor implements Compressor {
|
|||
(bytes.get(2) == LZFChunk.BLOCK_TYPE_COMPRESSED || bytes.get(2) == LZFChunk.BLOCK_TYPE_NON_COMPRESSED);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCompressed(byte[] data, int offset, int length) {
|
||||
return length >= 3 &&
|
||||
data[offset] == LZFChunk.BYTE_Z &&
|
||||
data[offset + 1] == LZFChunk.BYTE_V &&
|
||||
(data[offset + 2] == LZFChunk.BLOCK_TYPE_COMPRESSED || data[offset + 2] == LZFChunk.BLOCK_TYPE_NON_COMPRESSED);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isCompressed(ChannelBuffer buffer) {
|
||||
int offset = buffer.readerIndex();
|
||||
|
@ -104,23 +85,13 @@ public class LZFCompressor implements Compressor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public byte[] uncompress(byte[] data, int offset, int length) throws IOException {
|
||||
return decoder.decode(data, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] compress(byte[] data, int offset, int length) throws IOException {
|
||||
return LZFEncoder.safeEncode(data, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompressedStreamInput streamInput(StreamInput in) throws IOException {
|
||||
public StreamInput streamInput(StreamInput in) throws IOException {
|
||||
return new LZFCompressedStreamInput(in, decoder);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompressedStreamOutput streamOutput(StreamOutput out) throws IOException {
|
||||
return new LZFCompressedStreamOutput(out);
|
||||
public StreamOutput streamOutput(StreamOutput out) throws IOException {
|
||||
throw new UnsupportedOperationException("LZF is only here for back compat, no write support");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -83,8 +83,9 @@ public final class PathUtils {
*/
public static Path get(Path[] roots, String path) {
for (Path root : roots) {
Path normalizedPath = root.resolve(path).normalize();
if(normalizedPath.startsWith(root)) {
Path normalizedRoot = root.normalize();
Path normalizedPath = normalizedRoot.resolve(path).normalize();
if(normalizedPath.startsWith(normalizedRoot)) {
return normalizedPath;
}
}
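The fix above normalizes the root before the containment check. A self-contained JDK-only illustration of why the normalize-then-startsWith pattern matters (paths are made up):

[source,java]
--------------------------------------------------
import java.nio.file.Path;
import java.nio.file.Paths;

public class PathContainmentDemo {
    public static void main(String[] args) {
        Path root = Paths.get("/data/elasticsearch/./nodes");   // un-normalized root
        Path normalizedRoot = root.normalize();                  // /data/elasticsearch/nodes

        Path resolved = normalizedRoot.resolve("../../etc/passwd").normalize();
        // startsWith on the normalized root rejects the traversal attempt.
        System.out.println(resolved.startsWith(normalizedRoot)); // false
    }
}
--------------------------------------------------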
@ -59,6 +59,16 @@ public class InputStreamStreamInput extends StreamInput {
is.reset();
}

@Override
public boolean markSupported() {
return is.markSupported();
}

@Override
public void mark(int readlimit) {
is.mark(readlimit);
}

@Override
public void close() throws IOException {
is.close();
|
@ -24,11 +24,11 @@ import org.apache.lucene.search.Explanation;
|
|||
import org.apache.lucene.search.Scorer;
|
||||
import org.elasticsearch.script.ExplainableSearchScript;
|
||||
import org.elasticsearch.script.LeafSearchScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptException;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
public class ScriptScoreFunction extends ScoreFunction {
|
||||
|
||||
|
@ -71,17 +71,14 @@ public class ScriptScoreFunction extends ScoreFunction {
|
|||
}
|
||||
}
|
||||
|
||||
private final String sScript;
|
||||
|
||||
private final Map<String, Object> params;
|
||||
private final Script sScript;
|
||||
|
||||
private final SearchScript script;
|
||||
|
||||
|
||||
public ScriptScoreFunction(String sScript, Map<String, Object> params, SearchScript script) {
|
||||
public ScriptScoreFunction(Script sScript, SearchScript script) {
|
||||
super(CombineFunction.REPLACE);
|
||||
this.sScript = sScript;
|
||||
this.params = params;
|
||||
this.script = script;
|
||||
}
|
||||
|
||||
|
@ -114,8 +111,8 @@ public class ScriptScoreFunction extends ScoreFunction {
|
|||
} else {
|
||||
double score = score(docId, subQueryScore.getValue());
|
||||
String explanation = "script score function, computed with script:\"" + sScript;
|
||||
if (params != null) {
|
||||
explanation += "\" and parameters: \n" + params.toString();
|
||||
if (sScript.getParams() != null) {
|
||||
explanation += "\" and parameters: \n" + sScript.getParams().toString();
|
||||
}
|
||||
Explanation scoreExp = Explanation.match(
|
||||
subQueryScore.getValue(), "_score: ",
|
||||
|
@ -131,7 +128,7 @@ public class ScriptScoreFunction extends ScoreFunction {
|
|||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "script[" + sScript + "], params [" + params + "]";
|
||||
return "script" + sScript.toString();
|
||||
}
|
||||
|
||||
}
|
|
@ -1211,7 +1211,7 @@ public final class Settings implements ToXContent {
|
|||
* tries and resolve it against an environment variable ({@link System#getenv(String)}), and last, tries
|
||||
* and replace it with another setting already set on this builder.
|
||||
*/
|
||||
public Builder replacePropertyPlaceholders() {
|
||||
public Builder replacePropertyPlaceholders(String... ignoredValues) {
|
||||
PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false);
|
||||
PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() {
|
||||
@Override
|
||||
|
@ -1241,7 +1241,19 @@ public final class Settings implements ToXContent {
|
|||
}
|
||||
};
|
||||
for (Map.Entry<String, String> entry : Maps.newHashMap(map).entrySet()) {
|
||||
String value = propertyPlaceholder.replacePlaceholders(entry.getValue(), placeholderResolver);
|
||||
String possiblePlaceholder = entry.getValue();
|
||||
boolean ignored = false;
|
||||
for (String ignoredValue : ignoredValues) {
|
||||
if (ignoredValue.equals(possiblePlaceholder)) {
|
||||
ignored = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (ignored) {
|
||||
continue;
|
||||
}
|
||||
|
||||
String value = propertyPlaceholder.replacePlaceholders(possiblePlaceholder, placeholderResolver);
|
||||
// if the values exists and has length, we should maintain it in the map
|
||||
// otherwise, the replace process resolved into removing it
|
||||
if (Strings.hasLength(value)) {
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.common.xcontent;
|
|||
|
||||
import com.fasterxml.jackson.dataformat.cbor.CBORConstants;
|
||||
import com.fasterxml.jackson.dataformat.smile.SmileConstants;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -163,6 +164,9 @@ public class XContentFactory {
|
|||
if (c == '{') {
|
||||
return XContentType.JSON;
|
||||
}
|
||||
if (Character.isWhitespace(c) == false) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
@ -204,65 +208,76 @@ public class XContentFactory {
|
|||
}
|
||||
|
||||
/**
|
||||
* Guesses the content type based on the provided input stream.
|
||||
* Guesses the content type based on the provided input stream without consuming it.
|
||||
*/
|
||||
public static XContentType xContentType(InputStream si) throws IOException {
|
||||
        if (si.markSupported() == false) {
            throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass());
        }
        si.mark(GUESS_HEADER_LENGTH);
        try {
            final int firstInt = si.read(); // this must be an int since we need to respect the method contract
            if (firstInt == -1) {
                return null;
            }
            final int secondInt = si.read(); // this must be an int since we need to respect the method contract
            if (secondInt == -1) {
                return null;
            }
            final byte first = (byte) (0xff & firstInt);
            final byte second = (byte) (0xff & secondInt);
            if (first == SmileConstants.HEADER_BYTE_1 && second == SmileConstants.HEADER_BYTE_2) {
                int third = si.read();
                if (third == SmileConstants.HEADER_BYTE_3) {
                    return XContentType.SMILE;
                }
            }
            if (first == '{' || second == '{') {
                return XContentType.JSON;
            }
            if (first == '-' && second == '-') {
                int third = si.read();
                if (third == '-') {
                    return XContentType.YAML;
                }
            }
            // CBOR logic similar to CBORFactory#hasCBORFormat
            if (first == CBORConstants.BYTE_OBJECT_INDEFINITE){
                return XContentType.CBOR;
            }
            if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first)) {
                // Actually, specific "self-describe tag" is a very good indicator
                int third = si.read();
                if (third == -1) {
                    return null;
                }
                if (first == (byte) 0xD9 && second == (byte) 0xD9 && third == (byte) 0xF7) {
                    return XContentType.CBOR;
                }
            }
            // for small objects, some encoders just encode as major type object, we can safely
            // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort
            if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) {
                return XContentType.CBOR;
            }

            for (int i = 2; i < GUESS_HEADER_LENGTH; i++) {
                int val = si.read();
                if (val == -1) {
                    return null;
                }
                if (val == '{') {
                    return XContentType.JSON;
                }
                if (Character.isWhitespace(val) == false) {
                    break;
                }
            }
            return null;
        } finally {
            si.reset();
        }
    }

/**
@ -284,7 +299,7 @@ public class XContentFactory {
     * Guesses the content type based on the provided bytes.
     */
    public static XContentType xContentType(BytesReference bytes) {
        int length = bytes.length() < GUESS_HEADER_LENGTH ? bytes.length() : GUESS_HEADER_LENGTH;
        int length = bytes.length();
        if (length == 0) {
            return null;
        }

@ -316,9 +331,13 @@ public class XContentFactory {

        // a last chance for JSON
        for (int i = 0; i < length; i++) {
            if (bytes.get(i) == '{') {
            byte b = bytes.get(i);
            if (b == '{') {
                return XContentType.JSON;
            }
            if (Character.isWhitespace(b) == false) {
                break;
            }
        }
        return null;
    }

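The InputStream variant of xContentType() in this file now insists on mark/reset support. A minimal caller-side sketch of satisfying that contract, assuming the XContentFactory/XContentType classes shown in this change (the class and file names below are hypothetical):

[source,java]
--------------------------------------------------
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

public class ContentTypeSniffer {
    // Wrap the raw stream so mark()/reset() are available, as the updated
    // xContentType(InputStream) contract demands, then sniff the format.
    public static XContentType sniff(InputStream raw) throws IOException {
        InputStream in = raw.markSupported() ? raw : new BufferedInputStream(raw);
        return XContentFactory.xContentType(in);
    }

    public static void main(String[] args) throws IOException {
        try (InputStream in = new FileInputStream("doc.json")) {
            System.out.println(sniff(in)); // e.g. JSON, SMILE, YAML or CBOR
        }
    }
}
--------------------------------------------------
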
@ -28,14 +28,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent.Params;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@ -49,45 +49,30 @@ import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
public class XContentHelper {

    public static XContentParser createParser(BytesReference bytes) throws IOException {
        if (bytes.hasArray()) {
            return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
        }
        Compressor compressor = CompressorFactory.compressor(bytes);
        if (compressor != null) {
            CompressedStreamInput compressedInput = compressor.streamInput(bytes.streamInput());
            InputStream compressedInput = compressor.streamInput(bytes.streamInput());
            if (compressedInput.markSupported() == false) {
                compressedInput = new BufferedInputStream(compressedInput);
            }
            XContentType contentType = XContentFactory.xContentType(compressedInput);
            compressedInput.resetToBufferStart();
            return XContentFactory.xContent(contentType).createParser(compressedInput);
        } else {
            return XContentFactory.xContent(bytes).createParser(bytes.streamInput());
        }
    }

    public static XContentParser createParser(byte[] data, int offset, int length) throws IOException {
        Compressor compressor = CompressorFactory.compressor(data, offset, length);
        if (compressor != null) {
            CompressedStreamInput compressedInput = compressor.streamInput(StreamInput.wrap(data, offset, length));
            XContentType contentType = XContentFactory.xContentType(compressedInput);
            compressedInput.resetToBufferStart();
            return XContentFactory.xContent(contentType).createParser(compressedInput);
        } else {
            return XContentFactory.xContent(data, offset, length).createParser(data, offset, length);
        }
    }

    public static Tuple<XContentType, Map<String, Object>> convertToMap(BytesReference bytes, boolean ordered) throws ElasticsearchParseException {
        if (bytes.hasArray()) {
            return convertToMap(bytes.array(), bytes.arrayOffset(), bytes.length(), ordered);
        }
        try {
            XContentParser parser;
            XContentType contentType;
            Compressor compressor = CompressorFactory.compressor(bytes);
            if (compressor != null) {
                CompressedStreamInput compressedStreamInput = compressor.streamInput(bytes.streamInput());
                InputStream compressedStreamInput = compressor.streamInput(bytes.streamInput());
                if (compressedStreamInput.markSupported() == false) {
                    compressedStreamInput = new BufferedInputStream(compressedStreamInput);
                }
                contentType = XContentFactory.xContentType(compressedStreamInput);
                compressedStreamInput.resetToBufferStart();
                parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
            } else {
                contentType = XContentFactory.xContentType(bytes);

@ -103,34 +88,6 @@ public class XContentHelper {
        }
    }

    public static Tuple<XContentType, Map<String, Object>> convertToMap(byte[] data, boolean ordered) throws ElasticsearchParseException {
        return convertToMap(data, 0, data.length, ordered);
    }

    public static Tuple<XContentType, Map<String, Object>> convertToMap(byte[] data, int offset, int length, boolean ordered) throws ElasticsearchParseException {
        try {
            XContentParser parser;
            XContentType contentType;
            Compressor compressor = CompressorFactory.compressor(data, offset, length);
            if (compressor != null) {
                CompressedStreamInput compressedStreamInput = compressor.streamInput(StreamInput.wrap(data, offset, length));
                contentType = XContentFactory.xContentType(compressedStreamInput);
                compressedStreamInput.resetToBufferStart();
                parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
            } else {
                contentType = XContentFactory.xContentType(data, offset, length);
                parser = XContentFactory.xContent(contentType).createParser(data, offset, length);
            }
            if (ordered) {
                return Tuple.tuple(contentType, parser.mapOrderedAndClose());
            } else {
                return Tuple.tuple(contentType, parser.mapAndClose());
            }
        } catch (IOException e) {
            throw new ElasticsearchParseException("Failed to parse content to map", e);
        }
    }

    public static String convertToJson(BytesReference bytes, boolean reformatJson) throws IOException {
        return convertToJson(bytes, reformatJson, false);
    }

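A minimal sketch of how the remaining map-conversion helper is typically consumed, assuming the XContentHelper/Tuple APIs as they appear in this change (the sample JSON payload is hypothetical):

[source,java]
--------------------------------------------------
import java.util.Map;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;

public class ConvertToMapExample {
    public static void main(String[] args) {
        // Hypothetical JSON payload; convertToMap detects the content type itself.
        BytesArray source = new BytesArray("{\"user\":\"kimchy\",\"age\":42}");
        Tuple<XContentType, Map<String, Object>> result = XContentHelper.convertToMap(source, true);
        System.out.println(result.v1());              // JSON
        System.out.println(result.v2().get("user"));  // kimchy
    }
}
--------------------------------------------------
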
@ -426,9 +383,11 @@ public class XContentHelper {
    public static void writeDirect(BytesReference source, XContentBuilder rawBuilder, ToXContent.Params params) throws IOException {
        Compressor compressor = CompressorFactory.compressor(source);
        if (compressor != null) {
            CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
            InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
            if (compressedStreamInput.markSupported() == false) {
                compressedStreamInput = new BufferedInputStream(compressedStreamInput);
            }
            XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
            compressedStreamInput.resetToBufferStart();
            if (contentType == rawBuilder.contentType()) {
                Streams.copy(compressedStreamInput, rawBuilder.stream());
            } else {

@ -457,9 +416,11 @@ public class XContentHelper {
    public static void writeRawField(String field, BytesReference source, XContentBuilder builder, ToXContent.Params params) throws IOException {
        Compressor compressor = CompressorFactory.compressor(source);
        if (compressor != null) {
            CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
            InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
            if (compressedStreamInput.markSupported() == false) {
                compressedStreamInput = new BufferedInputStream(compressedStreamInput);
            }
            XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
            compressedStreamInput.resetToBufferStart();
            if (contentType == builder.contentType()) {
                builder.rawField(field, compressedStreamInput);
            } else {

@ -19,11 +19,15 @@

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.cbor.CborXContent;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.smile.SmileXContent;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;

import java.io.IOException;

/**
 * The content type of {@link org.elasticsearch.common.xcontent.XContent}.
 */

@ -144,4 +148,18 @@ public enum XContentType {
    public abstract String shortName();

    public abstract XContent xContent();

    public static XContentType readFrom(StreamInput in) throws IOException {
        int index = in.readVInt();
        for (XContentType contentType : values()) {
            if (index == contentType.index) {
                return contentType;
            }
        }
        throw new IllegalStateException("Unknown XContentType with index [" + index + "]");
    }

    public static void writeTo(XContentType contentType, StreamOutput out) throws IOException {
        out.writeVInt(contentType.index);
    }
}

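The new readFrom/writeTo pair serializes the enum by its explicit index rather than by name. A minimal round-trip sketch, assuming the BytesStreamOutput/StreamInput classes used elsewhere in this change:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentType;

public class XContentTypeWireExample {
    public static void main(String[] args) throws Exception {
        // Write the type out by index...
        BytesStreamOutput out = new BytesStreamOutput();
        XContentType.writeTo(XContentType.SMILE, out);

        // ...and read it back from the resulting bytes.
        StreamInput in = out.bytes().streamInput();
        XContentType roundTripped = XContentType.readFrom(in);
        System.out.println(roundTripped); // SMILE
    }
}
--------------------------------------------------
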
@ -227,21 +227,21 @@ public class PublishClusterStateAction extends AbstractComponent {

    public static BytesReference serializeFullClusterState(ClusterState clusterState, Version nodeVersion) throws IOException {
        BytesStreamOutput bStream = new BytesStreamOutput();
        StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream);
        stream.setVersion(nodeVersion);
        stream.writeBoolean(true);
        clusterState.writeTo(stream);
        stream.close();
        try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) {
            stream.setVersion(nodeVersion);
            stream.writeBoolean(true);
            clusterState.writeTo(stream);
        }
        return bStream.bytes();
    }

    public static BytesReference serializeDiffClusterState(Diff diff, Version nodeVersion) throws IOException {
        BytesStreamOutput bStream = new BytesStreamOutput();
        StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream);
        stream.setVersion(nodeVersion);
        stream.writeBoolean(false);
        diff.writeTo(stream);
        stream.close();
        try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) {
            stream.setVersion(nodeVersion);
            stream.writeBoolean(false);
            diff.writeTo(stream);
        }
        return bStream.bytes();
    }

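Both serializers now release the compressed stream with try-with-resources, which guarantees the compressor's trailing bytes are flushed even when writeTo() throws, before bStream.bytes() is read. A small sketch of the same write-then-read pattern, assuming the CompressorFactory/BytesStreamOutput APIs used in this change (payload is illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

public class CompressedRoundTrip {
    public static void main(String[] args) throws Exception {
        BytesStreamOutput bStream = new BytesStreamOutput();
        // Closing the compressed stream flushes the compressor before the bytes are used.
        try (StreamOutput stream = CompressorFactory.defaultCompressor().streamOutput(bStream)) {
            stream.writeBoolean(true);
            stream.writeString("cluster state payload");
        }
        BytesReference bytes = bStream.bytes();

        // Reading side: detect the compressor and wrap the raw stream input.
        StreamInput in = CompressorFactory.compressor(bytes).streamInput(bytes.streamInput());
        System.out.println(in.readBoolean());  // true
        System.out.println(in.readString());   // cluster state payload
    }
}
--------------------------------------------------
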
@ -56,6 +56,7 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;

import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 *

@ -166,12 +167,12 @@ public class GatewayAllocator extends AbstractComponent {

            AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetch = asyncFetchStarted.get(shard.shardId());
            if (fetch == null) {
                fetch = new InternalAsyncFetch<>(logger, "shard_started", shard.shardId(), startedAction, clusterService, allocationService);
                fetch = new InternalAsyncFetch<>(logger, "shard_started", shard.shardId(), startedAction);
                asyncFetchStarted.put(shard.shardId(), fetch);
            }
            AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState = fetch.fetchData(nodes, metaData, allocation.getIgnoreNodes(shard.shardId()));
            if (shardState.hasData() == false) {
                logger.trace("{}: ignoring allocation, still fetching shard started state");
                logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
                unassignedIterator.remove();
                routingNodes.ignoredUnassigned().add(shard);
                continue;

@ -395,7 +396,7 @@ public class GatewayAllocator extends AbstractComponent {
            }

            if (!canBeAllocatedToAtLeastOneNode) {
                logger.trace("{}: ignoring allocation, can't be allocated on any node");
                logger.trace("{}: ignoring allocation, can't be allocated on any node", shard);
                unassignedIterator.remove();
                routingNodes.ignoredUnassigned().add(shard);
                continue;

@ -403,12 +404,12 @@ public class GatewayAllocator extends AbstractComponent {

            AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> fetch = asyncFetchStore.get(shard.shardId());
            if (fetch == null) {
                fetch = new InternalAsyncFetch<>(logger, "shard_store", shard.shardId(), storeAction, clusterService, allocationService);
                fetch = new InternalAsyncFetch<>(logger, "shard_store", shard.shardId(), storeAction);
                asyncFetchStore.put(shard.shardId(), fetch);
            }
            AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> shardStores = fetch.fetchData(nodes, metaData, allocation.getIgnoreNodes(shard.shardId()));
            if (shardStores.hasData() == false) {
                logger.trace("{}: ignoring allocation, still fetching shard stores");
                logger.trace("{}: ignoring allocation, still fetching shard stores", shard);
                unassignedIterator.remove();
                routingNodes.ignoredUnassigned().add(shard);
                continue; // still fetching

@ -513,23 +514,24 @@ public class GatewayAllocator extends AbstractComponent {
        return changed;
    }

    static class InternalAsyncFetch<T extends BaseNodeResponse> extends AsyncShardFetch<T> {
        private final AtomicBoolean rerouting = new AtomicBoolean();

        private final ClusterService clusterService;
        private final AllocationService allocationService;
    class InternalAsyncFetch<T extends BaseNodeResponse> extends AsyncShardFetch<T> {

        public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List<? extends BaseNodesResponse<T>, T> action,
                                  ClusterService clusterService, AllocationService allocationService) {
        public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List<? extends BaseNodesResponse<T>, T> action) {
            super(logger, type, shardId, action);
            this.clusterService = clusterService;
            this.allocationService = allocationService;
        }

        @Override
        protected void reroute(ShardId shardId, String reason) {
            clusterService.submitStateUpdateTask("async_shard_fetch(" + type + ") " + shardId + ", reasons (" + reason + ")", Priority.HIGH, new ClusterStateUpdateTask() {
            if (rerouting.compareAndSet(false, true) == false) {
                logger.trace("{} already has pending reroute, ignoring {}", shardId, reason);
                return;
            }
            clusterService.submitStateUpdateTask("async_shard_fetch", Priority.HIGH, new ClusterStateUpdateTask() {
                @Override
                public ClusterState execute(ClusterState currentState) throws Exception {
                    rerouting.set(false);
                    if (currentState.nodes().masterNode() == null) {
                        return currentState;
                    }

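The new AtomicBoolean guard collapses bursts of fetch callbacks into a single pending reroute task. The same compare-and-set idea in a self-contained sketch (plain Java; the executor and task names are illustrative, not Elasticsearch APIs):

[source,java]
--------------------------------------------------
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class RerouteDeduper {
    private final AtomicBoolean pending = new AtomicBoolean();
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    // Many callers may request a reroute; only one task is queued at a time.
    public void requestReroute(String reason) {
        if (pending.compareAndSet(false, true) == false) {
            System.out.println("already pending, ignoring: " + reason);
            return;
        }
        executor.submit(() -> {
            pending.set(false); // accept new requests once this run starts
            System.out.println("rerouting because: " + reason);
        });
    }

    public static void main(String[] args) throws InterruptedException {
        RerouteDeduper deduper = new RerouteDeduper();
        for (int i = 0; i < 5; i++) {
            deduper.requestReroute("shard fetch " + i);
        }
        deduper.executor.shutdown();
        deduper.executor.awaitTermination(1, TimeUnit.SECONDS);
    }
}
--------------------------------------------------
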
@ -21,16 +21,26 @@ package org.elasticsearch.gateway;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.OutputStreamIndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.io.OutputStream;

@ -280,7 +290,7 @@ public abstract class MetaDataStateFormat<T> {
                logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
                continue;
            }
            parser = XContentHelper.createParser(data, 0, data.length);
            parser = XContentHelper.createParser(new BytesArray(data));
            state = fromXContent(parser);
            if (state == null) {
                logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());

@ -53,6 +53,10 @@ public abstract class AbstractIndexComponent implements IndexComponent {
        return this.index;
    }

    public Settings indexSettings() {
        return indexSettings;
    }

    public String nodeName() {
        return indexSettings.get("name", "");
    }

@ -21,7 +21,7 @@ package org.elasticsearch.index.aliases;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.compress.CompressedXContent;

/**
 *

@ -30,11 +30,11 @@ public class IndexAlias {

    private final String alias;

    private final CompressedString filter;
    private final CompressedXContent filter;

    private final Query parsedFilter;

    public IndexAlias(String alias, @Nullable CompressedString filter, @Nullable Query parsedFilter) {
    public IndexAlias(String alias, @Nullable CompressedXContent filter, @Nullable Query parsedFilter) {
        this.alias = alias;
        this.filter = filter;
        this.parsedFilter = parsedFilter;

@ -45,7 +45,7 @@ public class IndexAlias {
    }

    @Nullable
    public CompressedString filter() {
    public CompressedXContent filter() {
        return filter;
    }

@ -23,7 +23,7 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;

@ -63,11 +63,11 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
        return aliases.get(alias);
    }

    public IndexAlias create(String alias, @Nullable CompressedString filter) {
    public IndexAlias create(String alias, @Nullable CompressedXContent filter) {
        return new IndexAlias(alias, filter, parse(alias, filter));
    }

    public void add(String alias, @Nullable CompressedString filter) {
    public void add(String alias, @Nullable CompressedXContent filter) {
        add(new IndexAlias(alias, filter, parse(alias, filter)));
    }

@ -120,7 +120,7 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
        aliases.remove(alias);
    }

    private Query parse(String alias, CompressedString filter) {
    private Query parse(String alias, CompressedXContent filter) {
        if (filter == null) {
            return null;
        }

@ -21,16 +21,10 @@ package org.elasticsearch.index.fielddata.plain;
|
|||
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.ImmutableSortedSet;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.*;
|
||||
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
|
||||
import org.apache.lucene.index.PostingsEnum;
|
||||
import org.apache.lucene.index.SortedDocValues;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.util.Accountable;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
|
@ -39,6 +33,7 @@ import org.apache.lucene.util.PagedBytes;
|
|||
import org.apache.lucene.util.packed.PackedInts;
|
||||
import org.apache.lucene.util.packed.PackedLongValues;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.breaker.CircuitBreaker;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
|
@ -47,14 +42,8 @@ import org.elasticsearch.common.lease.Releasables;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
|
||||
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.*;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
||||
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
|
||||
import org.elasticsearch.index.fielddata.RamAccountingTermsEnum;
|
||||
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
|
||||
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
|
||||
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
|
||||
|
@ -71,17 +60,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
|
|||
import org.elasticsearch.search.MultiValueMode;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.NavigableSet;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
|
@ -90,7 +69,7 @@ import java.util.concurrent.TimeUnit;
|
|||
*/
|
||||
public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicParentChildFieldData> implements IndexParentChildFieldData, DocumentTypeListener {
|
||||
|
||||
private final NavigableSet<BytesRef> parentTypes;
|
||||
private final NavigableSet<String> parentTypes;
|
||||
private final CircuitBreakerService breakerService;
|
||||
|
||||
// If child type (a type with _parent field) is added or removed, we want to make sure modifications don't happen
|
||||
|
@ -101,7 +80,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
|
||||
CircuitBreakerService breakerService) {
|
||||
super(index, indexSettings, fieldNames, fieldDataType, cache);
|
||||
parentTypes = new TreeSet<>(BytesRef.getUTF8SortedAsUnicodeComparator());
|
||||
parentTypes = new TreeSet<>();
|
||||
this.breakerService = breakerService;
|
||||
for (DocumentMapper documentMapper : mapperService.docMappers(false)) {
|
||||
beforeCreate(documentMapper);
|
||||
|
@ -115,15 +94,60 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
}
|
||||
|
||||
@Override
|
||||
public ParentChildAtomicFieldData loadDirect(LeafReaderContext context) throws Exception {
|
||||
public AtomicParentChildFieldData load(LeafReaderContext context) {
|
||||
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0)) {
|
||||
final LeafReader reader = context.reader();
|
||||
final NavigableSet<String> parentTypes;
|
||||
synchronized (lock) {
|
||||
parentTypes = ImmutableSortedSet.copyOf(this.parentTypes);
|
||||
}
|
||||
return new AbstractAtomicParentChildFieldData() {
|
||||
|
||||
public Set<String> types() {
|
||||
return parentTypes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortedDocValues getOrdinalsValues(String type) {
|
||||
try {
|
||||
return DocValues.getSorted(reader, ParentFieldMapper.joinField(type));
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException("cannot load join doc values field for type [" + type + "]", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long ramBytesUsed() {
|
||||
// unknown
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Accountable> getChildResources() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws ElasticsearchException {
|
||||
}
|
||||
};
|
||||
} else {
|
||||
return super.load(context);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public AbstractAtomicParentChildFieldData loadDirect(LeafReaderContext context) throws Exception {
|
||||
LeafReader reader = context.reader();
|
||||
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
|
||||
"acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO
|
||||
);
|
||||
|
||||
final NavigableSet<BytesRef> parentTypes;
|
||||
final NavigableSet<BytesRef> parentTypes = new TreeSet<>();
|
||||
synchronized (lock) {
|
||||
parentTypes = ImmutableSortedSet.copyOf(BytesRef.getUTF8SortedAsUnicodeComparator(), this.parentTypes);
|
||||
for (String parentType : this.parentTypes) {
|
||||
parentTypes.add(new BytesRef(parentType));
|
||||
}
|
||||
}
|
||||
boolean success = false;
|
||||
ParentChildAtomicFieldData data = null;
|
||||
|
@ -193,7 +217,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
if (parentFieldMapper.active()) {
|
||||
// A _parent field can never be added to an existing mapping, so a _parent field either exists on
|
||||
// a new created or doesn't exists. This is why we can update the known parent types via DocumentTypeListener
|
||||
if (parentTypes.add(new BytesRef(parentFieldMapper.type()))) {
|
||||
if (parentTypes.add(parentFieldMapper.type())) {
|
||||
clear();
|
||||
}
|
||||
}
|
||||
|
@ -321,11 +345,9 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
@Override
|
||||
public IndexParentChildFieldData localGlobalDirect(IndexReader indexReader) throws Exception {
|
||||
final long startTime = System.nanoTime();
|
||||
final Set<String> parentTypes = new HashSet<>();
|
||||
final Set<String> parentTypes;
|
||||
synchronized (lock) {
|
||||
for (BytesRef type : this.parentTypes) {
|
||||
parentTypes.add(type.utf8ToString());
|
||||
}
|
||||
parentTypes = ImmutableSet.copyOf(this.parentTypes);
|
||||
}
|
||||
|
||||
long ramBytesUsed = 0;
|
||||
|
@ -353,7 +375,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
);
|
||||
}
|
||||
|
||||
return new GlobalFieldData(indexReader, fielddata, ramBytesUsed);
|
||||
return new GlobalFieldData(indexReader, fielddata, ramBytesUsed, perType);
|
||||
}
|
||||
|
||||
private static class GlobalAtomicFieldData extends AbstractAtomicParentChildFieldData {
|
||||
|
@ -437,16 +459,18 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
|
||||
}
|
||||
|
||||
private class GlobalFieldData implements IndexParentChildFieldData, Accountable {
|
||||
public class GlobalFieldData implements IndexParentChildFieldData, Accountable {
|
||||
|
||||
private final AtomicParentChildFieldData[] fielddata;
|
||||
private final IndexReader reader;
|
||||
private final long ramBytesUsed;
|
||||
private final Map<String, OrdinalMapAndAtomicFieldData> ordinalMapPerType;
|
||||
|
||||
GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed) {
|
||||
GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed, Map<String, OrdinalMapAndAtomicFieldData> ordinalMapPerType) {
|
||||
this.reader = reader;
|
||||
this.ramBytesUsed = ramBytesUsed;
|
||||
this.fielddata = fielddata;
|
||||
this.ordinalMapPerType = ordinalMapPerType;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -515,4 +539,20 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the global ordinal map for the specified type
|
||||
*/
|
||||
// TODO: OrdinalMap isn't expose in the field data framework, because it is an implementation detail.
|
||||
// However the JoinUtil works directly with OrdinalMap, so this is a hack to get access to OrdinalMap
|
||||
// I don't think we should expose OrdinalMap in IndexFieldData, because only parent/child relies on it and for the
|
||||
// rest of the code OrdinalMap is an implementation detail, but maybe we can expose it in IndexParentChildFieldData interface?
|
||||
public static MultiDocValues.OrdinalMap getOrdinalMap(IndexParentChildFieldData indexParentChildFieldData, String type) {
|
||||
if (indexParentChildFieldData instanceof ParentChildIndexFieldData.GlobalFieldData) {
|
||||
return ((GlobalFieldData) indexParentChildFieldData).ordinalMapPerType.get(type).ordMap;
|
||||
} else {
|
||||
// one segment, local ordinals are global
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.compress.CompressorFactory;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@ -134,8 +134,18 @@ public class DocumentMapper implements ToXContent {
|
|||
return this;
|
||||
}
|
||||
|
||||
public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map<String, Object> parameters) {
|
||||
sourceTransforms.add(new ScriptTransform(scriptService, script, scriptType, language, parameters));
|
||||
public Builder transform(ScriptService scriptService, Script script) {
|
||||
sourceTransforms.add(new ScriptTransform(scriptService, script));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #transform(ScriptService, Script)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language,
|
||||
Map<String, Object> parameters) {
|
||||
sourceTransforms.add(new ScriptTransform(scriptService, new Script(script, scriptType, language, parameters)));
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -150,7 +160,7 @@ public class DocumentMapper implements ToXContent {
|
|||
private final String type;
|
||||
private final StringAndBytesText typeText;
|
||||
|
||||
private volatile CompressedString mappingSource;
|
||||
private volatile CompressedXContent mappingSource;
|
||||
|
||||
private final Mapping mapping;
|
||||
|
||||
|
@ -235,7 +245,7 @@ public class DocumentMapper implements ToXContent {
|
|||
return mapping.meta;
|
||||
}
|
||||
|
||||
public CompressedString mappingSource() {
|
||||
public CompressedXContent mappingSource() {
|
||||
return this.mappingSource;
|
||||
}
|
||||
|
||||
|
@ -388,20 +398,24 @@ public class DocumentMapper implements ToXContent {
|
|||
|
||||
private void addFieldMappers(Collection<FieldMapper> fieldMappers) {
|
||||
assert mappingLock.isWriteLockedByCurrentThread();
|
||||
this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
|
||||
this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
|
||||
mapperService.addFieldMappers(fieldMappers);
|
||||
}
|
||||
|
||||
public boolean isParent(String type) {
|
||||
return mapperService.getParentTypes().contains(type);
|
||||
}
|
||||
|
||||
private void addObjectMappers(Collection<ObjectMapper> objectMappers) {
|
||||
assert mappingLock.isWriteLockedByCurrentThread();
|
||||
MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
|
||||
for (ObjectMapper objectMapper : objectMappers) {
|
||||
builder.put(objectMapper.fullPath(), objectMapper);
|
||||
if (objectMapper.nested().isNested()) {
|
||||
hasNestedObjects = true;
|
||||
MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
|
||||
for (ObjectMapper objectMapper : objectMappers) {
|
||||
builder.put(objectMapper.fullPath(), objectMapper);
|
||||
if (objectMapper.nested().isNested()) {
|
||||
hasNestedObjects = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
this.objectMappers = builder.immutableMap();
|
||||
this.objectMappers = builder.immutableMap();
|
||||
mapperService.addObjectMappers(objectMappers);
|
||||
}
|
||||
|
||||
|
@ -454,26 +468,26 @@ public class DocumentMapper implements ToXContent {
|
|||
|
||||
public MergeResult merge(Mapping mapping, boolean simulate) {
|
||||
try (ReleasableLock lock = mappingWriteLock.acquire()) {
|
||||
final MergeResult mergeResult = newMergeContext(simulate);
|
||||
this.mapping.merge(mapping, mergeResult);
|
||||
if (simulate == false) {
|
||||
addFieldMappers(mergeResult.getNewFieldMappers());
|
||||
addObjectMappers(mergeResult.getNewObjectMappers());
|
||||
refreshSource();
|
||||
}
|
||||
return mergeResult;
|
||||
final MergeResult mergeResult = newMergeContext(simulate);
|
||||
this.mapping.merge(mapping, mergeResult);
|
||||
if (simulate == false) {
|
||||
addFieldMappers(mergeResult.getNewFieldMappers());
|
||||
addObjectMappers(mergeResult.getNewObjectMappers());
|
||||
refreshSource();
|
||||
}
|
||||
return mergeResult;
|
||||
}
|
||||
}
|
||||
|
||||
private void refreshSource() throws ElasticsearchGenerationException {
|
||||
try {
|
||||
BytesStreamOutput bStream = new BytesStreamOutput();
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream));
|
||||
builder.startObject();
|
||||
toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
builder.close();
|
||||
mappingSource = new CompressedString(bStream.bytes());
|
||||
try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON, CompressorFactory.defaultCompressor().streamOutput(bStream))) {
|
||||
builder.startObject();
|
||||
toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
}
|
||||
mappingSource = new CompressedXContent(bStream.bytes());
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
|
||||
}
|
||||
|
@ -498,28 +512,13 @@ public class DocumentMapper implements ToXContent {
|
|||
private static class ScriptTransform implements SourceTransform {
|
||||
private final ScriptService scriptService;
|
||||
/**
|
||||
* Contents of the script to transform the source document before indexing.
|
||||
* The script to transform the source document before indexing.
|
||||
*/
|
||||
private final String script;
|
||||
/**
|
||||
* The type of the script to run.
|
||||
*/
|
||||
private final ScriptType scriptType;
|
||||
/**
|
||||
* Language of the script to transform the source document before indexing.
|
||||
*/
|
||||
private final String language;
|
||||
/**
|
||||
* Parameters passed to the transform script.
|
||||
*/
|
||||
private final Map<String, Object> parameters;
|
||||
private final Script script;
|
||||
|
||||
public ScriptTransform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map<String, Object> parameters) {
|
||||
public ScriptTransform(ScriptService scriptService, Script script) {
|
||||
this.scriptService = scriptService;
|
||||
this.script = script;
|
||||
this.scriptType = scriptType;
|
||||
this.language = language;
|
||||
this.parameters = parameters;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -527,7 +526,7 @@ public class DocumentMapper implements ToXContent {
|
|||
public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
|
||||
try {
|
||||
// We use the ctx variable and the _source name to be consistent with the update api.
|
||||
ExecutableScript executable = scriptService.executable(new Script(language, script, scriptType, parameters), ScriptContext.Standard.MAPPING);
|
||||
ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING);
|
||||
Map<String, Object> ctx = new HashMap<>(1);
|
||||
ctx.put("_source", sourceAsMap);
|
||||
executable.setNextVar("ctx", ctx);
|
||||
|
@ -541,16 +540,7 @@ public class DocumentMapper implements ToXContent {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field("script", script);
|
||||
if (language != null) {
|
||||
builder.field("lang", language);
|
||||
}
|
||||
if (parameters != null) {
|
||||
builder.field("params", parameters);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
return script.toXContent(builder, params);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.geo.ShapesAvailability;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
|
@ -71,10 +71,8 @@ import org.elasticsearch.index.mapper.object.ObjectMapper;
|
|||
import org.elasticsearch.index.mapper.object.RootObjectMapper;
|
||||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.index.similarity.SimilarityLookupService;
|
||||
import org.elasticsearch.script.ScriptParameterParser;
|
||||
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
@ -194,15 +192,15 @@ public class DocumentMapperParser extends AbstractIndexComponent {
|
|||
return parse(type, mapping, defaultSource);
|
||||
}
|
||||
|
||||
public DocumentMapper parseCompressed(@Nullable String type, CompressedString source) throws MapperParsingException {
|
||||
public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source) throws MapperParsingException {
|
||||
return parseCompressed(type, source, null);
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public DocumentMapper parseCompressed(@Nullable String type, CompressedString source, String defaultSource) throws MapperParsingException {
|
||||
public DocumentMapper parseCompressed(@Nullable String type, CompressedXContent source, String defaultSource) throws MapperParsingException {
|
||||
Map<String, Object> mapping = null;
|
||||
if (source != null) {
|
||||
Map<String, Object> root = XContentHelper.convertToMap(source.compressed(), true).v2();
|
||||
Map<String, Object> root = XContentHelper.convertToMap(source.compressedReference(), true).v2();
|
||||
Tuple<String, Map<String, Object>> t = extractMapping(type, root);
|
||||
type = t.v1();
|
||||
mapping = t.v2();
|
||||
|
@ -238,7 +236,6 @@ public class DocumentMapperParser extends AbstractIndexComponent {
|
|||
Object fieldNode = entry.getValue();
|
||||
|
||||
if ("transform".equals(fieldName)) {
|
||||
iterator.remove();
|
||||
if (fieldNode instanceof Map) {
|
||||
parseTransform(docBuilder, (Map<String, Object>) fieldNode, parserContext.indexVersionCreated());
|
||||
} else if (fieldNode instanceof List) {
|
||||
|
@ -251,6 +248,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
|
|||
} else {
|
||||
throw new MapperParsingException("Transform must be an object or an array but was: " + fieldNode);
|
||||
}
|
||||
iterator.remove();
|
||||
} else {
|
||||
Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName);
|
||||
if (typeParser != null) {
|
||||
|
@ -296,23 +294,10 @@ public class DocumentMapperParser extends AbstractIndexComponent {
|
|||
return remainingFields.toString();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig, Version indexVersionCreated) {
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
scriptParameterParser.parseConfig(transformConfig, true);
|
||||
|
||||
String script = null;
|
||||
ScriptType scriptType = null;
|
||||
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
|
||||
if (scriptValue != null) {
|
||||
script = scriptValue.script();
|
||||
scriptType = scriptValue.scriptType();
|
||||
}
|
||||
|
||||
Script script = Script.parse(transformConfig, true);
|
||||
if (script != null) {
|
||||
String scriptLang = scriptParameterParser.lang();
|
||||
Map<String, Object> params = (Map<String, Object>)transformConfig.remove("params");
|
||||
docBuilder.transform(scriptService, script, scriptType, scriptLang, params);
|
||||
docBuilder.transform(scriptService, script);
|
||||
}
|
||||
checkNoRemainingFields(transformConfig, indexVersionCreated, "Transform config has unsupported parameters: ");
|
||||
}
|
||||
|
|
|
@ -195,6 +195,8 @@ public class MappedFieldType extends FieldType {
|
|||
return new MappedFieldType(this);
|
||||
}
|
||||
|
||||
// norelease: we need to override freeze() and add safety checks that all settings are actually set
|
||||
|
||||
public boolean isNumeric() {
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -21,10 +21,7 @@ package org.elasticsearch.index.mapper;
|
|||
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.google.common.base.Predicate;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.Iterators;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.*;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
|
||||
|
@ -40,10 +37,11 @@ import org.apache.lucene.search.TermQuery;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchGenerationException;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.compress.CompressedString;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
|
@ -117,6 +115,8 @@ public class MapperService extends AbstractIndexComponent {
|
|||
|
||||
private volatile ImmutableMap<String, FieldMapper> unmappedFieldMappers = ImmutableMap.of();
|
||||
|
||||
private volatile ImmutableSet<String> parentTypes = ImmutableSet.of();
|
||||
|
||||
@Inject
|
||||
public MapperService(Index index, @IndexSettings Settings indexSettings, AnalysisService analysisService, IndexFieldDataService fieldDataService,
|
||||
SimilarityLookupService similarityLookupService,
|
||||
|
@ -214,7 +214,7 @@ public class MapperService extends AbstractIndexComponent {
|
|||
typeListeners.remove(listener);
|
||||
}
|
||||
|
||||
public DocumentMapper merge(String type, CompressedString mappingSource, boolean applyDefault) {
|
||||
public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault) {
|
||||
if (DEFAULT_MAPPING.equals(type)) {
|
||||
// verify we can parse it
|
||||
DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource);
|
||||
|
@ -250,6 +250,9 @@ public class MapperService extends AbstractIndexComponent {
|
|||
if (mapper.type().contains(",")) {
|
||||
throw new InvalidTypeNameException("mapping type name [" + mapper.type() + "] should not include ',' in it");
|
||||
}
|
||||
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0) && mapper.type().equals(mapper.parentFieldMapper().type())) {
|
||||
throw new IllegalArgumentException("The [_parent.type] option can't point to the same type");
|
||||
}
|
||||
if (mapper.type().contains(".") && !PercolatorService.TYPE_NAME.equals(mapper.type())) {
|
||||
logger.warn("Type [{}] contains a '.', it is recommended not to include it within a type name", mapper.type());
|
||||
}
|
||||
|
@ -285,6 +288,12 @@ public class MapperService extends AbstractIndexComponent {
|
|||
typeListener.beforeCreate(mapper);
|
||||
}
|
||||
mappers = newMapBuilder(mappers).put(mapper.type(), mapper).map();
|
||||
if (mapper.parentFieldMapper().active()) {
|
||||
ImmutableSet.Builder<String> parentTypesCopy = ImmutableSet.builder();
|
||||
parentTypesCopy.addAll(parentTypes);
|
||||
parentTypesCopy.add(mapper.parentFieldMapper().type());
|
||||
parentTypes = parentTypesCopy.build();
|
||||
}
|
||||
assert assertSerialization(mapper);
|
||||
return mapper;
|
||||
}
|
||||
|
@ -293,7 +302,7 @@ public class MapperService extends AbstractIndexComponent {
|
|||
|
||||
private boolean assertSerialization(DocumentMapper mapper) {
|
||||
// capture the source now, it may change due to concurrent parsing
|
||||
final CompressedString mappingSource = mapper.mappingSource();
|
||||
final CompressedXContent mappingSource = mapper.mappingSource();
|
||||
DocumentMapper newMapper = parse(mapper.type(), mappingSource, false);
|
||||
|
||||
if (newMapper.mappingSource().equals(mappingSource) == false) {
|
||||
|
@ -328,7 +337,7 @@ public class MapperService extends AbstractIndexComponent {
|
|||
this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers);
|
||||
}
|
||||
|
||||
public DocumentMapper parse(String mappingType, CompressedString mappingSource, boolean applyDefault) throws MapperParsingException {
|
||||
public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
|
||||
String defaultMappingSource;
|
||||
if (PercolatorService.TYPE_NAME.equals(mappingType)) {
|
||||
defaultMappingSource = this.defaultPercolatorMappingSource;
|
||||
|
@ -645,6 +654,10 @@ public class MapperService extends AbstractIndexComponent {
|
|||
return null;
|
||||
}
|
||||
|
||||
public ImmutableSet<String> getParentTypes() {
|
||||
return parentTypes;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Whether a field is a metadata field.
|
||||
*/
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.elasticsearch.common.ParseField;
|
|||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressorFactory;
|
||||
import org.elasticsearch.common.compress.NotXContentException;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.CollectionUtils;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
|
|
@ -20,6 +20,8 @@ package org.elasticsearch.index.mapper.internal;
|
|||
|
||||
import com.google.common.base.Objects;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.document.SortedDocValuesField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.queries.TermsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
|
@ -36,6 +38,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
|
|||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperBuilders;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
|
@ -55,7 +58,6 @@ import java.util.Map;
|
|||
import static org.elasticsearch.common.settings.Settings.builder;
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
|
||||
import static org.elasticsearch.index.mapper.MapperBuilders.parent;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -110,7 +112,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
@Override
|
||||
public ParentFieldMapper build(BuilderContext context) {
|
||||
if (type == null) {
|
||||
throw new MapperParsingException("Parent mapping must contain the parent type");
|
||||
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
|
||||
}
|
||||
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
|
||||
return new ParentFieldMapper(fieldType, type, fieldDataSettings, context.indexSettings());
|
||||
|
@ -120,7 +122,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
public static class TypeParser implements Mapper.TypeParser {
|
||||
@Override
|
||||
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
ParentFieldMapper.Builder builder = parent();
|
||||
ParentFieldMapper.Builder builder = MapperBuilders.parent();
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String fieldName = Strings.toUnderscoreCase(entry.getKey());
|
||||
|
@ -267,6 +269,11 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
boolean parent = context.docMapper().isParent(context.type());
|
||||
if (parent && fieldType.hasDocValues()) {
|
||||
fields.add(createJoinField(context.type(), context.id()));
|
||||
}
|
||||
|
||||
if (!active()) {
|
||||
return;
|
||||
}
|
||||
|
@ -276,6 +283,9 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
String parentId = context.parser().text();
|
||||
context.sourceToParse().parent(parentId);
|
||||
fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
|
||||
if (fieldType.hasDocValues()) {
|
||||
fields.add(createJoinField(type, parentId));
|
||||
}
|
||||
} else {
|
||||
// otherwise, we are running it post processing of the xcontent
|
||||
String parsedParentId = context.doc().get(Defaults.NAME);
|
||||
|
@ -287,6 +297,9 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
}
|
||||
// we did not add it in the parsing phase, add it now
|
||||
fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
|
||||
if (fieldType.hasDocValues()) {
|
||||
fields.add(createJoinField(type, parentId));
|
||||
}
|
||||
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) {
|
||||
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
|
||||
}
|
||||
|
@ -295,6 +308,15 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
// we have parent mapping, yet no value was set, ignore it...
|
||||
}
|
||||
|
||||
private SortedDocValuesField createJoinField(String parentType, String id) {
|
||||
String joinField = joinField(parentType);
|
||||
return new SortedDocValuesField(joinField, new BytesRef(id));
|
||||
}
|
||||
|
||||
public static String joinField(String parentType) {
|
||||
return ParentFieldMapper.NAME + "#" + parentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
|
@ -318,26 +340,10 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef indexedValueForSearch(Object value) {
|
||||
if (value instanceof BytesRef) {
|
||||
BytesRef bytesRef = (BytesRef) value;
|
||||
if (Uid.hasDelimiter(bytesRef)) {
|
||||
return bytesRef;
|
||||
}
|
||||
return Uid.createUidAsBytes(typeAsBytes, bytesRef);
|
||||
}
|
||||
String sValue = value.toString();
|
||||
if (sValue.indexOf(Uid.DELIMITER) == -1) {
|
||||
return Uid.createUidAsBytes(type, sValue);
|
||||
}
|
||||
return super.indexedValueForSearch(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
|
||||
if (!Objects.equal(type, other.type)) {
|
||||
if (Objects.equal(type, other.type) == false) {
|
||||
mergeResult.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + other.type + "]");
|
||||
}
|
||||
|
||||
|
@ -357,7 +363,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
|
|||
}
|
||||
|
||||
/**
|
||||
* @return Whether the _parent field is actually used.
|
||||
* @return Whether the _parent field is actually configured.
|
||||
*/
|
||||
public boolean active() {
|
||||
return type != null;
|
||||
|
|
|
@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper.internal;

import com.google.common.base.Objects;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;

@ -30,7 +31,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedStreamInput;
import org.elasticsearch.common.compress.Compressor;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.stream.BytesStreamOutput;

@ -53,7 +53,9 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

@ -359,9 +361,11 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
// see if we need to convert the content type
Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
InputStream compressedStreamInput = compressor.streamInput(source.streamInput());
if (compressedStreamInput.markSupported() == false) {
compressedStreamInput = new BufferedInputStream(compressedStreamInput);
}
XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType != formatContentType) {
// we need to reread and store back, compressed....
BytesStreamOutput bStream = new BytesStreamOutput();
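The SourceFieldMapper change above widens the decompressed stream to a plain `InputStream` and wraps it in a `BufferedInputStream` when mark/reset is not supported, so the content type can be sniffed without consuming bytes the real parser still needs. A small, self-contained illustration of that mark/peek/reset idiom (the sample payload is made up and the manual peek stands in for `XContentFactory.xContentType`):

--------------------------------------------------
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class MarkResetSniff {
    public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream("{\"field\":\"value\"}".getBytes(StandardCharsets.UTF_8));
        if (in.markSupported() == false) {
            in = new BufferedInputStream(in); // guarantees mark/reset support
        }
        in.mark(16);              // remember the current position
        int first = in.read();    // peek at the first byte; '{' hints at JSON
        in.reset();               // rewind so a full parse still sees every byte
        System.out.println("first byte: " + (char) first);
    }
}
--------------------------------------------------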
@ -19,16 +19,23 @@

package org.elasticsearch.index.query;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.ParseField;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;

@ -38,6 +45,7 @@ import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;

import java.io.IOException;

@ -137,8 +145,9 @@ public class HasChildQueryParser implements QueryParser {
if (childDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_child] No mapping for type [" + childType + "]");
}
if (!childDocMapper.parentFieldMapper().active()) {
throw new QueryParsingException(parseContext, "[has_child] Type [" + childType + "] does not have parent mapping");
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (parentFieldMapper.active() == false) {
throw new QueryParsingException(parseContext, "[has_child] _parent field has no parent type configured");
}

if (innerHits != null) {

@ -147,11 +156,6 @@ public class HasChildQueryParser implements QueryParser {
parseContext.addInnerHits(name, parentChildInnerHits);
}

ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (!parentFieldMapper.active()) {
throw new QueryParsingException(parseContext, "[has_child] _parent field not configured");
}

String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {

@ -171,16 +175,20 @@ public class HasChildQueryParser implements QueryParser {
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());

Query query;
// TODO: use the query API
Filter parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
final Query query;
final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
} else {
query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter,
shortCircuitParentDocSet, nonNestedDocsFilter);
// TODO: use the query API
Filter parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
} else {
query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter,
shortCircuitParentDocSet, nonNestedDocsFilter);
}
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, query);

@ -188,4 +196,44 @@ public class HasChildQueryParser implements QueryParser {
query.setBoost(boost);
return query;
}

public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Query toQuery, ScoreType scoreType, Query innerQuery, int minChildren, int maxChildren) throws IOException {
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new IllegalStateException("Search context is required to be set");
}

String joinField = ParentFieldMapper.joinField(parentType);
ScoreMode scoreMode;
// TODO: move entirely over from ScoreType to org.apache.lucene.join.ScoreMode, when we drop the 1.x parent child code.
switch (scoreType) {
case NONE:
scoreMode = ScoreMode.None;
break;
case MIN:
scoreMode = ScoreMode.Min;
break;
case MAX:
scoreMode = ScoreMode.Max;
break;
case SUM:
scoreMode = ScoreMode.Total;
break;
case AVG:
scoreMode = ScoreMode.Avg;
break;
default:
throw new UnsupportedOperationException("score type [" + scoreType + "] not supported");
}
IndexReader indexReader = searchContext.searcher().getIndexReader();
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);

// 0 in pre 2.x p/c impl means unbounded
if (maxChildren == 0) {
maxChildren = Integer.MAX_VALUE;
}
return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
}
}
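`joinUtilHelper` translates the legacy `ScoreType` values into Lucene's `ScoreMode` before delegating to `JoinUtil.createJoinQuery`; the only non-obvious pairing is `SUM`, which Lucene calls `Total`. The same mapping written as a lookup table, purely as an illustration of the switch above:

--------------------------------------------------
import java.util.EnumMap;
import java.util.Map;

import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.index.search.child.ScoreType;

// Declarative equivalent of the switch statement in joinUtilHelper.
Map<ScoreType, ScoreMode> scoreModes = new EnumMap<>(ScoreType.class);
scoreModes.put(ScoreType.NONE, ScoreMode.None);
scoreModes.put(ScoreType.MIN, ScoreMode.Min);
scoreModes.put(ScoreType.MAX, ScoreMode.Max);
scoreModes.put(ScoreType.SUM, ScoreMode.Total); // "sum" maps to Lucene's Total
scoreModes.put(ScoreType.AVG, ScoreMode.Avg);
--------------------------------------------------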
@ -22,6 +22,7 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.apache.lucene.search.QueryWrapperFilter;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;

@ -36,6 +37,7 @@ import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.ParentConstantScoreQuery;
import org.elasticsearch.index.search.child.ParentQuery;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SubSearchContext;

@ -43,6 +45,8 @@ import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import static org.elasticsearch.index.query.HasChildQueryParser.joinUtilHelper;

public class HasParentQueryParser implements QueryParser {

public static final String NAME = "has_parent";

@ -142,7 +146,7 @@ public class HasParentQueryParser implements QueryParser {
return query;
}

static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, Tuple<String, SubSearchContext> innerHits) {
static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, Tuple<String, SubSearchContext> innerHits) throws IOException {
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType

@ -197,10 +201,15 @@ public class HasParentQueryParser implements QueryParser {
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter());
Filter childrenFilter = new QueryWrapperFilter(Queries.not(parentFilter));
if (score) {
return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
ScoreType scoreMode = score ? ScoreType.MAX : ScoreType.NONE;
return joinUtilHelper(parentType, parentChildIndexFieldData, childrenFilter, scoreMode, innerQuery, 0, Integer.MAX_VALUE);
} else {
return new ParentConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
if (score) {
return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
} else {
return new ParentConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
}
}
}
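From a client's point of view nothing changes here: the parser decides per index whether the Lucene join or the legacy parent/child queries back the request. A typical Java API call that ends up in `createParentQuery` might look like this (the parent type and field names are made up):

--------------------------------------------------
import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

import org.elasticsearch.index.query.QueryBuilder;

// has_parent: match child documents whose "blog" parent has tag == "something"
QueryBuilder query = hasParentQuery("blog", termQuery("tag", "something"));
--------------------------------------------------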
@ -27,7 +27,9 @@ import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;

import java.util.Collection;
import java.util.Map;

@ -562,6 +564,13 @@ public abstract class QueryBuilders {
return new GeoShapeQueryBuilder(name, shape);
}

/**
* Facilitates creating template query requests using an inline script
*/
public static TemplateQueryBuilder templateQuery(Template template) {
return new TemplateQueryBuilder(template);
}

/**
* Facilitates creating template query requests using an inline script
*/

@ -596,6 +605,18 @@ public abstract class QueryBuilders {
*
* @param script The script to filter by.
*/
public static ScriptQueryBuilder scriptQuery(Script script) {
return new ScriptQueryBuilder(script);
}

/**
* A builder for filter based on a script.
*
* @param script The script to filter by.
* @deprecated Use {@link #scriptQuery(Script)} instead.
*/
@Deprecated
public static ScriptQueryBuilder scriptQuery(String script) {
return new ScriptQueryBuilder(script);
}
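The new factory methods take the `Script` and `Template` wrapper objects instead of raw strings. A short usage sketch of the script-based overload next to the deprecated one (the script source is illustrative):

--------------------------------------------------
import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.script.Script;

// Preferred: wrap the script source in a Script object
QueryBuilder current = scriptQuery(new Script("doc['num1'].value > 1"));

// Deprecated: raw source, with params added on the builder afterwards
QueryBuilder legacy = scriptQuery("doc['num1'].value > param1").addParam("param1", 1);
--------------------------------------------------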
@ -21,6 +21,8 @@ package org.elasticsearch.index.query;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.index.LeafReaderContext;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryparser.classic.MapperQueryParser;
import org.apache.lucene.queryparser.classic.QueryParserSettings;

@ -79,6 +81,8 @@ public class QueryParseContext {

private final Index index;

private final Version indexVersionCreated;

private final IndexQueryParserService indexQueryParser;

private final Map<String, Query> namedQueries = Maps.newHashMap();

@ -99,6 +103,7 @@ public class QueryParseContext {

public QueryParseContext(Index index, IndexQueryParserService indexQueryParser) {
this.index = index;
this.indexVersionCreated = Version.indexCreated(indexQueryParser.indexSettings());
this.indexQueryParser = indexQueryParser;
}

@ -376,4 +381,8 @@ public class QueryParseContext {
public boolean isDeprecatedSetting(String setting) {
return CACHE.match(setting) || CACHE_KEY.match(setting);
}

public Version indexVersionCreated() {
return indexVersionCreated;
}
}
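`indexVersionCreated` is read once from the index settings and exposed so individual query parsers can branch on how old the index is; this is the accessor the has_child and has_parent parsers above rely on. A small sketch of the consuming side (assumes a `parseContext` is in scope, as inside a `QueryParser`):

--------------------------------------------------
import org.elasticsearch.Version;

if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
    // index carries the _parent#<type> doc-values join field, use the Lucene join path
} else {
    // older index, fall back to the pre-2.0 parent/child query implementations
}
--------------------------------------------------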
@ -20,34 +20,56 @@
package org.elasticsearch.index.query;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import static com.google.common.collect.Maps.newHashMap;

public class ScriptQueryBuilder extends QueryBuilder {

private final String script;
private Script script;

@Deprecated
private String scriptString;

@Deprecated
private Map<String, Object> params;

@Deprecated
private String lang;

private String queryName;

public ScriptQueryBuilder(String script) {
public ScriptQueryBuilder(Script script) {
this.script = script;
}

/**
* @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
*/
@Deprecated
public ScriptQueryBuilder(String script) {
this.scriptString = script;
}

/**
* @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
*/
@Deprecated
public ScriptQueryBuilder addParam(String name, Object value) {
if (params == null) {
params = newHashMap();
params = new HashMap<>();
}
params.put(name, value);
return this;
}

/**
* @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
*/
@Deprecated
public ScriptQueryBuilder params(Map<String, Object> params) {
if (this.params == null) {
this.params = params;

@ -59,7 +81,10 @@ public class ScriptQueryBuilder extends QueryBuilder {

/**
* Sets the script language.
*
* @deprecated Use {@link #ScriptQueryBuilder(Script)} instead.
*/
@Deprecated
public ScriptQueryBuilder lang(String lang) {
this.lang = lang;
return this;

@ -74,15 +99,23 @@ public class ScriptQueryBuilder extends QueryBuilder {
}

@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
protected void doXContent(XContentBuilder builder, Params builderParams) throws IOException {

builder.startObject(ScriptQueryParser.NAME);
builder.field("script", script);
if (this.params != null) {
builder.field("params", this.params);
}
if (this.lang != null) {
builder.field("lang", lang);
if (script != null) {
builder.field(ScriptField.SCRIPT.getPreferredName(), script);
} else {
if (this.scriptString != null) {
builder.field("script", scriptString);
}
if (this.params != null) {
builder.field("params", this.params);
}
if (this.lang != null) {
builder.field("lang", lang);
}
}

if (queryName != null) {
builder.field("_name", queryName);
}
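The builder now prefers a `Script` object and keeps the string/params/lang path only for backwards compatibility; `doXContent` serializes whichever form was used. A sketch of the current and the deprecated way to construct it (the script sources and the language value are illustrative):

--------------------------------------------------
import org.elasticsearch.index.query.ScriptQueryBuilder;
import org.elasticsearch.script.Script;

// Current style: the script source travels inside the Script object
ScriptQueryBuilder current = new ScriptQueryBuilder(new Script("doc['num1'].value > 1"));

// Deprecated style: raw source, params and lang configured separately
ScriptQueryBuilder legacy = new ScriptQueryBuilder("doc['num1'].value > param1")
        .addParam("param1", 1)
        .lang("groovy");
--------------------------------------------------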