Merge branch 'master' into feature/translog_checkpoints

Conflicts:
	src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java
Simon Willnauer, 2015-05-15 17:01:23 +02:00
commit 9f81ae4845
291 changed files with 3452 additions and 2577 deletions

View File

@@ -85,5 +85,8 @@ set JAVA_OPTS=%JAVA_OPTS% -XX:+DisableExplicitGC
 REM Ensure UTF-8 encoding by default (e.g. filenames)
 set JAVA_OPTS=%JAVA_OPTS% -Dfile.encoding=UTF-8
+REM Use our provided JNA always versus the system one
+set JAVA_OPTS=%JAVA_OPTS% -Djna.nosys=true
 set ES_CLASSPATH=%ES_CLASSPATH%;%ES_HOME%/lib/${project.build.finalName}.jar;%ES_HOME%/lib/*;%ES_HOME%/lib/sigar/*
 set ES_PARAMS=-Delasticsearch -Des-foreground=yes -Des.path.home="%ES_HOME%"

View File

@@ -68,3 +68,6 @@ JAVA_OPTS="$JAVA_OPTS -XX:+DisableExplicitGC"
 # Ensure UTF-8 encoding by default (e.g. filenames)
 JAVA_OPTS="$JAVA_OPTS -Dfile.encoding=UTF-8"
+# Use our provided JNA always versus the system one
+JAVA_OPTS="$JAVA_OPTS -Djna.nosys=true"

View File

@@ -123,3 +123,26 @@ settings and filter the returned buckets based on a `min_doc_count` setting (by
 bucket that matches documents and the last one are returned). This histogram also supports the `extended_bounds`
 setting, which enables extending the bounds of the histogram beyond the data itself (to read more on why you'd want to
 do that please refer to the explanation <<search-aggregations-bucket-histogram-aggregation-extended-bounds,here>>).
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "publish_date" : {
+            "date_histogram" : {
+                "field" : "publish_date",
+                "interval": "year",
+                "missing": "2000-01-01" <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `publish_date` field will fall into the same bucket as documents that have the value `2000-01-01`.
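
For reference, the same request can be issued from the Java API by sending the aggregation as raw search source; a minimal sketch, assuming an existing `org.elasticsearch.client.Client` and a hypothetical `articles` index:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;

// Sends the aggregation above as raw JSON search source; "articles" is a
// made-up index name and `client` is an assumed, initialized Client.
String source = "{ \"size\" : 0, \"aggs\" : { \"publish_date\" : { \"date_histogram\" : {"
        + " \"field\" : \"publish_date\", \"interval\" : \"year\","
        + " \"missing\" : \"2000-01-01\" } } } }";
SearchResponse response = client.prepareSearch("articles")
        .setSource(source)  // raw source keeps the `missing` parameter intact
        .get();
--------------------------------------------------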

View File

@@ -317,3 +317,26 @@ Response:
     }
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "quantity" : {
+            "histogram" : {
+                "field" : "quantity",
+                "interval": 10,
+                "missing": 0 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `quantity` field will fall into the same bucket as documents that have the value `0`.

View File

@@ -655,3 +655,25 @@ in inner aggregations.
 <1> experimental[] the possible values are `map`, `global_ordinals`, `global_ordinals_hash` and `global_ordinals_low_cardinality`
 Please note that Elasticsearch will ignore this execution hint if it is not applicable and that there is no backward compatibility guarantee on these hints.
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "tags" : {
+            "terms" : {
+                "field" : "tags",
+                "missing": "N/A" <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `tags` field will fall into the same bucket as documents that have the value `N/A`.

View File

@@ -73,3 +73,25 @@ It turned out that the exam was way above the level of the students and a grade
     }
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grade_avg" : {
+            "avg" : {
+                "field" : "grade",
+                "missing": 10 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.

View File

@@ -155,3 +155,24 @@ however since hashes need to be computed on the fly.
 TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "tag_cardinality" : {
+            "cardinality" : {
+                "field" : "tag",
+                "missing": "N/A" <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `tag` field will fall into the same bucket as documents that have the value `N/A`.

View File

@@ -117,3 +117,25 @@ It turned out that the exam was way above the level of the students and a grade
     }
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grades_stats" : {
+            "extended_stats" : {
+                "field" : "grade",
+                "missing": 0 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `0`.

View File

@@ -67,3 +67,24 @@ Let's say that the prices of the documents in our index are in USD, but we would
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grade_max" : {
+            "max" : {
+                "field" : "grade",
+                "missing": 10 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.

View File

@@ -66,3 +66,25 @@ Let's say that the prices of the documents in our index are in USD, but we would
     }
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grade_min" : {
+            "min" : {
+                "field" : "grade",
+                "missing": 10 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.

View File

@@ -190,3 +190,25 @@ A "node" uses roughly 32 bytes of memory, so under worst-case scenarios (large a
 of data which arrives sorted and in-order) the default settings will produce a
 TDigest roughly 64KB in size. In practice data tends to be more random and
 the TDigest will use less memory.
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grade_percentiles" : {
+            "percentiles" : {
+                "field" : "grade",
+                "missing": 10 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.

View File

@@ -86,3 +86,25 @@ script to generate values which percentile ranks are calculated on
 <2> Scripting supports parameterized input just like any other script
 TIP: The `script` parameter expects an inline script. Use `script_id` for indexed scripts and `script_file` for scripts in the `config/scripts/` directory.
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grade_ranks" : {
+            "percentile_ranks" : {
+                "field" : "grade",
+                "missing": 10 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.

View File

@@ -79,3 +79,25 @@ It turned out that the exam was way above the level of the students and a grade
     }
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "grades_stats" : {
+            "stats" : {
+                "field" : "grade",
+                "missing": 0 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `0`.

View File

@@ -77,3 +77,25 @@ Computing the sum of squares over all stock tick changes:
     }
 }
 --------------------------------------------------
+
+==== Missing value
+
+The `missing` parameter defines how documents that are missing a value should be treated.
+By default they will be ignored but it is also possible to treat them as if they
+had a value.
+
+[source,js]
+--------------------------------------------------
+{
+    "aggs" : {
+        "total_time" : {
+            "sum" : {
+                "field" : "took",
+                "missing": 100 <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+
+<1> Documents without a value in the `took` field will fall into the same bucket as documents that have the value `100`.

View File

@@ -131,8 +131,6 @@ get operations |2
 gets |0s
 |`get.missing_total` |`gmto`, `getMissingTotal` |No |Number of failed
 get operations |1
-|`id_cache.memory_size` |`im`, `idCacheMemory` |No |Used ID cache
-memory |216b
 |`indexing.delete_current` |`idc`, `indexingDeleteCurrent` |No |Number
 of current deletion operations |0
 |`indexing.delete_time` |`idti`, `indexingDeleteTime` |No |Time spent in

View File

@@ -61,10 +61,6 @@ Will return, for example:
         "memory_size_in_bytes": 0,
         "evictions": 0
     },
-    "id_cache": {
-        "memory_size": "0b",
-        "memory_size_in_bytes": 0
-    },
     "completion": {
         "size": "0b",
         "size_in_bytes": 0

View File

@@ -18,6 +18,19 @@ curl -XGET 'localhost:9200/_analyze' -d '
 coming[2.0.0, body based parameters were added in 2.0.0]
+
+If the text parameter is provided as an array of strings, it is analyzed as a multi-valued field.
+
+[source,js]
+--------------------------------------------------
+curl -XGET 'localhost:9200/_analyze' -d '
+{
+  "analyzer" : "standard",
+  "text" : ["this is a test", "the second text"]
+}'
+--------------------------------------------------
+
+coming[2.0.0, body based parameters were added in 2.0.0]
+
 Or by building a custom transient analyzer out of tokenizers,
 token filters and char filters. Token filters can use the shorter 'filters'
 parameter name:

View File

@@ -10,8 +10,7 @@ $ curl -XPOST 'http://localhost:9200/twitter/_cache/clear'
 --------------------------------------------------
 The API, by default, will clear all caches. Specific caches can be cleaned
-explicitly by setting `filter`, `fielddata`, `query_cache`,
-or `id_cache` to `true`.
+explicitly by setting `filter`, `fielddata` or `query_cache`.
 All caches relating to a specific field(s) can also be cleared by
 specifying `fields` parameter with a comma delimited list of the
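
The same narrowed surface shows up in the Java API, where the `setIdCache(...)` setter removed later in this commit leaves fielddata as the way to drop parent/child memory. A minimal sketch, assuming an existing `org.elasticsearch.client.Client` and a hypothetical `twitter` index:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;

// Clears only the selected caches; `_parent` data now lives in fielddata,
// so clearing the fielddata cache covers it as well. `client` is assumed.
ClearIndicesCacheResponse response = client.admin().indices()
        .prepareClearCache("twitter")
        .setFilterCache(true)
        .setFieldDataCache(true)  // replaces the removed setIdCache(true)
        .get();
--------------------------------------------------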

View File

@@ -560,3 +560,29 @@ same search request will likely be off if `top_children` was used.
 === Removed file based index templates
 Index templates can no longer be configured on disk. Use the `_template` API instead.
+
+[float]
+=== Removed `id_cache` from stats apis
+
+Removed the `id_cache` metric from the nodes stats, indices stats and cluster stats apis. This metric has also been removed
+from the shards cat, indices cat and nodes cat apis. Parent/child memory is now reported under fielddata, because it
+has internally been using fielddata for a while now.
+
+To see how much memory the parent/child field data is taking, the `fielddata_fields` option can be used on the stats
+apis. Indices stats example:
+
+[source,js]
+--------------------------------------------------
+curl -XGET "http://localhost:9200/_stats/fielddata?pretty&human&fielddata_fields=_parent"
+--------------------------------------------------
+
+Parent/child has used field data for the `_parent` field since version `1.1.0`, but the memory stats for the `_parent`
+field were still shown under the `id_cache` metric in the stats apis for backwards compatibility between 1.x versions.
+Before version `1.1.0` parent/child had its own in-memory data structures for id values in the `_parent` field.
+
+[float]
+=== Removed `id_cache` from clear cache api
+
+Removed the `id_cache` option from the clear cache apis. The `fielddata` option should be used to clear the `_parent` field
+from fielddata.

View File

@@ -212,6 +212,7 @@ You can disable that check using `plugins.check_lucene: false`.
 .Supported by the community
 * https://github.com/shikhar/eskka[eskka Discovery Plugin] (by Shikhar Bhushan)
+* https://github.com/grantr/elasticsearch-srv-discovery[DNS SRV Discovery Plugin] (by Grant Rodgers)
 [float]
 [[river]]

View File

@@ -82,11 +82,11 @@ Additionally, every child document is mapped to its parent using a long
 value (approximately). It is advisable to keep the string parent ID short
 in order to reduce memory usage.
-You can check how much memory is being used by the ID cache using the
-<<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
+You can check how much memory is being used by the `_parent` field in the fielddata cache
+using the <<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
 APIS, eg:
 [source,js]
 --------------------------------------------------
-curl -XGET "http://localhost:9200/_stats/id_cache?pretty&human"
+curl -XGET "http://localhost:9200/_stats/fielddata?pretty&human&fielddata_fields=_parent"
 --------------------------------------------------

View File

@@ -57,13 +57,13 @@ Additionally, every child document is mapped to its parent using a long
 value (approximately). It is advisable to keep the string parent ID short
 in order to reduce memory usage.
-You can check how much memory is being used by the ID cache using the
-<<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
+You can check how much memory is being used by the `_parent` field in the fielddata cache
+using the <<indices-stats,indices stats>> or <<cluster-nodes-stats,nodes stats>>
 APIS, eg:
 [source,js]
 --------------------------------------------------
-curl -XGET "http://localhost:9200/_stats/id_cache?pretty&human"
+curl -XGET "http://localhost:9200/_stats/fielddata?pretty&human&fielddata_fields=_parent"
 --------------------------------------------------

View File

@@ -50,6 +50,8 @@ to. The `mode` option can have the following values:
     number based array fields.
 `avg`:: Use the average of all values as sort value. Only applicable
     for number based array fields.
+`median`:: Use the median of all values as sort value. Only applicable
+    for number based array fields.
 ===== Sort mode example usage
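
A minimal Java-API sketch of the new mode (the index and field names are made up, and `client` is an assumed `org.elasticsearch.client.Client`; `sortMode` takes the mode as a string):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

// Sorts each document by the median of its `prices` array values.
SearchResponse response = client.prepareSearch("products")
        .addSort(SortBuilders.fieldSort("prices")
                .sortMode("median")      // the mode added by this commit
                .order(SortOrder.ASC))
        .get();
--------------------------------------------------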

View File

@@ -163,20 +163,18 @@ can contain misspellings (See parameter descriptions below).
 `collate`::
-    Checks each suggestion against the specified `query` or `filter` to
-    prune suggestions for which no matching docs exist in the index. Either
-    a `query` or a `filter` must be specified, and it is run as a
-    <<query-dsl-template-query,`template` query>>. The current suggestion is
-    automatically made available as the `{{suggestion}}` variable, which
-    should be used in your query/filter. You can still specify your own
-    template `params` -- the `suggestion` value will be added to the
-    variables you specify. You can specify a `preference` to control
-    on which shards the query is executed (see <<search-request-preference>>).
-    The default value is `_only_local`. Additionally, you can specify
-    a `prune` to control if all phrase suggestions will be
-    returned, when set to `true` the suggestions will have an additional
-    option `collate_match`, which will be `true` if matching documents
-    for the phrase was found, `false` otherwise. The default value for
-    `prune` is `false`.
+    Checks each suggestion against the specified `query` or `filter` to
+    prune suggestions for which no matching docs exist in the index.
+    The collate query for a suggestion is run only on the local shard from which
+    the suggestion has been generated. Either a `query` or a `filter` must
+    be specified, and it is run as a <<query-dsl-template-query,`template` query>>.
+    The current suggestion is automatically made available as the `{{suggestion}}`
+    variable, which should be used in your query/filter. You can still specify
+    your own template `params` -- the `suggestion` value will be added to the
+    variables you specify. Additionally, you can specify a `prune` to control
+    if all phrase suggestions will be returned, when set to `true` the suggestions
+    will have an additional option `collate_match`, which will be `true` if
+    matching documents for the phrase was found, `false` otherwise.
+    The default value for `prune` is `false`.
 [source,js]
 --------------------------------------------------
@@ -199,8 +197,7 @@ curl -XPOST 'localhost:9200/_search' -d {
         }
     },
     "params": {"field_name" : "title"}, <3>
-    "preference": "_primary", <4>
-    "prune": true <5>
+    "prune": true <4>
 }
 }
 }
@@ -212,8 +209,7 @@ curl -XPOST 'localhost:9200/_search' -d {
 of each suggestion.
 <3> An additional `field_name` variable has been specified in
     `params` and is used by the `match` query.
-<4> The default `preference` has been changed to `_primary`.
-<5> All suggestions will be returned with an extra `collate_match`
+<4> All suggestions will be returned with an extra `collate_match`
     option indicating whether the generated phrase matched any
     document.

View File

@@ -226,7 +226,7 @@
 <dependency>
     <groupId>com.carrotsearch</groupId>
     <artifactId>hppc</artifactId>
-    <version>0.6.0</version>
+    <version>0.7.1</version>
 </dependency>
 <dependency>
@@ -462,6 +462,7 @@
 <argument>-XX:+HeapDumpOnOutOfMemoryError</argument>
 <argument>-XX:+DisableExplicitGC</argument>
 <argument>-Dfile.encoding=UTF-8</argument>
+<argument>-Djna.nosys=true</argument>
 <argument>-Delasticsearch</argument>
 </arguments>
 </configuration>
@@ -609,6 +610,8 @@
 <tests.timeoutSuite>${tests.timeoutSuite}</tests.timeoutSuite>
 <tests.showSuccess>${tests.showSuccess}</tests.showSuccess>
 <tests.integration>${tests.integration}</tests.integration>
+<tests.thirdparty>${tests.thirdparty}</tests.thirdparty>
+<tests.config>${tests.config}</tests.config>
 <tests.client.ratio>${tests.client.ratio}</tests.client.ratio>
 <tests.enable_mock_modules>${tests.enable_mock_modules}</tests.enable_mock_modules>
 <tests.assertion.disabled>${tests.assertion.disabled}</tests.assertion.disabled>

View File

@@ -37,7 +37,7 @@
     "description" : "With `true`, specify that a local shard should be used if available, with `false`, use a random shard (default: true)"
 },
 "text": {
-    "type" : "string",
+    "type" : "list",
     "description" : "The text on which the analysis should be performed (when request body is not used)"
 },
 "tokenizer": {

View File

@@ -32,14 +32,6 @@
     "type" : "boolean",
     "description" : "Clear filter caches"
 },
-"id": {
-    "type" : "boolean",
-    "description" : "Clear ID caches for parent/child"
-},
-"id_cache": {
-    "type" : "boolean",
-    "description" : "Clear ID caches for parent/child"
-},
 "ignore_unavailable": {
     "type" : "boolean",
     "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)"

View File

@@ -17,7 +17,7 @@
 },
 "metric" : {
     "type" : "list",
-    "options" : ["_all", "completion", "docs", "fielddata", "filter_cache", "flush", "get", "id_cache", "indexing", "merge", "percolate", "query_cache", "refresh", "search", "segments", "store", "warmer", "suggest"],
+    "options" : ["_all", "completion", "docs", "fielddata", "filter_cache", "flush", "get", "indexing", "merge", "percolate", "query_cache", "refresh", "search", "segments", "store", "warmer", "suggest"],
     "description" : "Limit the information returned the specific metrics."
 }
 },

View File

@@ -20,7 +20,7 @@
 },
 "index_metric" : {
     "type" : "list",
-    "options" : ["_all", "completion", "docs", "fielddata", "filter_cache", "flush", "get", "id_cache", "indexing", "merge", "percolate", "query_cache", "refresh", "search", "segments", "store", "warmer", "suggest"],
+    "options" : ["_all", "completion", "docs", "fielddata", "filter_cache", "flush", "get", "indexing", "merge", "percolate", "query_cache", "refresh", "search", "segments", "store", "warmer", "suggest"],
     "description" : "Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified."
 },
 "node_id": {

View File

@@ -29,7 +29,6 @@
 get.exists_total .+ \n
 get.missing_time .+ \n
 get.missing_total .+ \n
-id_cache.memory_size .+ \n
 indexing.delete_current .+ \n
 indexing.delete_time .+ \n
 indexing.delete_total .+ \n

View File

@@ -63,3 +63,11 @@ setup:
       body: { "text": "Bar Foo", "filters": ["lowercase"], "tokenizer": keyword }
   - length: {tokens: 1 }
   - match: { tokens.0.token: bar foo }
+
+---
+"Array text":
+  - do:
+      indices.analyze:
+        body: { "text": ["Foo Bar", "Baz"], "filters": ["lowercase"], "tokenizer": keyword }
+  - length: {tokens: 2 }
+  - match: { tokens.0.token: foo bar }
+  - match: { tokens.1.token: baz }

View File

@@ -30,7 +30,6 @@ setup:
 - is_true: _all.total.flush
 - is_true: _all.total.warmer
 - is_true: _all.total.filter_cache
-- is_true: _all.total.id_cache
 - is_true: _all.total.fielddata
 - is_true: _all.total.percolate
 - is_true: _all.total.completion
@@ -54,7 +53,6 @@ setup:
 - is_true: _all.total.flush
 - is_true: _all.total.warmer
 - is_true: _all.total.filter_cache
-- is_true: _all.total.id_cache
 - is_true: _all.total.fielddata
 - is_true: _all.total.percolate
 - is_true: _all.total.completion
@@ -78,7 +76,6 @@ setup:
 - is_false: _all.total.flush
 - is_false: _all.total.warmer
 - is_false: _all.total.filter_cache
-- is_false: _all.total.id_cache
 - is_false: _all.total.fielddata
 - is_false: _all.total.percolate
 - is_false: _all.total.completion
@@ -102,7 +99,6 @@ setup:
 - is_false: _all.total.flush
 - is_false: _all.total.warmer
 - is_false: _all.total.filter_cache
-- is_false: _all.total.id_cache
 - is_false: _all.total.fielddata
 - is_false: _all.total.percolate
 - is_false: _all.total.completion
@@ -127,7 +123,6 @@ setup:
 - is_false: _all.total.flush
 - is_false: _all.total.warmer
 - is_false: _all.total.filter_cache
-- is_false: _all.total.id_cache
 - is_false: _all.total.fielddata
 - is_false: _all.total.percolate
 - is_false: _all.total.completion

View File

@@ -827,10 +827,7 @@ public class MapperQueryParser extends QueryParser {
 private void applyBoost(String field, Query q) {
     if (settings.boosts() != null) {
-        float boost = 1f;
-        if (settings.boosts().containsKey(field)) {
-            boost = settings.boosts().lget();
-        }
+        float boost = settings.boosts().getOrDefault(field, 1f);
         q.setBoost(boost);
     }
 }
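
The rewrite works because hppc 0.7 (the upgrade in this commit's `pom.xml`) dropped the stateful `lget()` call in favor of `getOrDefault`; a standalone sketch of the idiom:

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.ObjectFloatHashMap;

ObjectFloatHashMap<String> boosts = new ObjectFloatHashMap<>();
boosts.put("title", 2.0f);

// hppc 0.6: if (boosts.containsKey(field)) { boost = boosts.lget(); }
// hppc 0.7: a single call, with no hidden "last slot" state between lookups.
float boost = boosts.getOrDefault("body", 1f);  // key absent -> 1f
--------------------------------------------------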

View File

@@ -19,7 +19,7 @@
 package org.apache.lucene.queryparser.classic;
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.search.FuzzyQuery;
@@ -69,7 +69,7 @@ public class QueryParserSettings {
 List<String> fields = null;
 Collection<String> queryTypes = null;
-ObjectFloatOpenHashMap<String> boosts = null;
+ObjectFloatHashMap<String> boosts = null;
 float tieBreaker = 0.0f;
 boolean useDisMax = true;
@@ -286,11 +286,11 @@ public class QueryParserSettings {
     this.queryTypes = queryTypes;
 }
-public ObjectFloatOpenHashMap<String> boosts() {
+public ObjectFloatHashMap<String> boosts() {
     return boosts;
 }
-public void boosts(ObjectFloatOpenHashMap<String> boosts) {
+public void boosts(ObjectFloatHashMap<String> boosts) {
     this.boosts = boosts;
 }

View File

@@ -18,7 +18,7 @@
 */
 package org.apache.lucene.search.suggest.analyzing;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -1008,7 +1008,7 @@ public long ramBytesUsed() {
 private BytesRefBuilder analyzed = new BytesRefBuilder();
 private final SurfaceFormAndPayload[] surfaceFormsAndPayload;
 private int count;
-private ObjectIntOpenHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
+private ObjectIntHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
 private int payloadSep;
 public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads, int payloadSep) {
@@ -1061,9 +1061,11 @@ public long ramBytesUsed() {
     // dups: skip the rest:
     return;
 }
 BytesRef surfaceCopy;
-if (count > 0 && seenSurfaceForms.containsKey(surface)) {
-    surfaceIndex = seenSurfaceForms.lget();
+final int keySlot;
+if (count > 0 && (keySlot = seenSurfaceForms.indexOf(surface)) >= 0) {
+    surfaceIndex = seenSurfaceForms.indexGet(keySlot);
     SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex];
     if (encodedWeight >= surfaceFormAndPayload.weight) {
         return;
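
Where the old code paired `containsKey()` with the now-removed `lget()`, hppc 0.7 exposes the key slot directly; a standalone sketch of the replacement pattern used above:

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.ObjectIntHashMap;

ObjectIntHashMap<String> seen = new ObjectIntHashMap<>();
seen.put("foo", 7);

// indexOf(...) returns a non-negative slot when the key exists; indexGet(...)
// then reads the value at that slot without a second hash lookup.
int slot = seen.indexOf("foo");
if (slot >= 0) {
    int value = seen.indexGet(slot);  // 7
}
--------------------------------------------------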

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.action.admin.cluster.stats;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import org.elasticsearch.action.admin.indices.stats.CommonStats;
@@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentBuilderString;
 import org.elasticsearch.index.cache.filter.FilterCacheStats;
-import org.elasticsearch.index.cache.id.IdCacheStats;
 import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
 import org.elasticsearch.index.percolator.stats.PercolateStats;
@@ -48,7 +47,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 private StoreStats store;
 private FieldDataStats fieldData;
 private FilterCacheStats filterCache;
-private IdCacheStats idCache;
 private CompletionStats completion;
 private SegmentsStats segments;
 private PercolateStats percolate;
@@ -57,13 +55,12 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 }
 public ClusterStatsIndices(ClusterStatsNodeResponse[] nodeResponses) {
-    ObjectObjectOpenHashMap<String, ShardStats> countsPerIndex = new ObjectObjectOpenHashMap<>();
+    ObjectObjectHashMap<String, ShardStats> countsPerIndex = new ObjectObjectHashMap<>();
     this.docs = new DocsStats();
     this.store = new StoreStats();
     this.fieldData = new FieldDataStats();
     this.filterCache = new FilterCacheStats();
-    this.idCache = new IdCacheStats();
     this.completion = new CompletionStats();
     this.segments = new SegmentsStats();
     this.percolate = new PercolateStats();
@@ -87,7 +84,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 store.add(shardCommonStats.store);
 fieldData.add(shardCommonStats.fieldData);
 filterCache.add(shardCommonStats.filterCache);
-idCache.add(shardCommonStats.idCache);
 completion.add(shardCommonStats.completion);
 segments.add(shardCommonStats.segments);
 percolate.add(shardCommonStats.percolate);
@@ -125,10 +121,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
     return filterCache;
 }
-public IdCacheStats getIdCache() {
-    return idCache;
-}
 public CompletionStats getCompletion() {
     return completion;
 }
@@ -149,7 +141,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 store = StoreStats.readStoreStats(in);
 fieldData = FieldDataStats.readFieldDataStats(in);
 filterCache = FilterCacheStats.readFilterCacheStats(in);
-idCache = IdCacheStats.readIdCacheStats(in);
 completion = CompletionStats.readCompletionStats(in);
 segments = SegmentsStats.readSegmentsStats(in);
 percolate = PercolateStats.readPercolateStats(in);
@@ -163,7 +154,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 store.writeTo(out);
 fieldData.writeTo(out);
 filterCache.writeTo(out);
-idCache.writeTo(out);
 completion.writeTo(out);
 segments.writeTo(out);
 percolate.writeTo(out);
@@ -187,7 +177,6 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 store.toXContent(builder, params);
 fieldData.toXContent(builder, params);
 filterCache.toXContent(builder, params);
-idCache.toXContent(builder, params);
 completion.toXContent(builder, params);
 segments.toXContent(builder, params);
 percolate.toXContent(builder, params);

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.action.admin.cluster.stats;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
@@ -303,10 +303,10 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 int availableProcessors;
 long availableMemory;
-ObjectIntOpenHashMap<OsInfo.Cpu> cpus;
+ObjectIntHashMap<OsInfo.Cpu> cpus;
 public OsStats() {
-    cpus = new ObjectIntOpenHashMap<>();
+    cpus = new ObjectIntHashMap<>();
 }
 public void addNodeInfo(NodeInfo nodeInfo) {
@@ -330,7 +330,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
     return new ByteSizeValue(availableMemory);
 }
-public ObjectIntOpenHashMap<OsInfo.Cpu> getCpus() {
+public ObjectIntHashMap<OsInfo.Cpu> getCpus() {
     return cpus;
 }
@@ -339,7 +339,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 availableProcessors = in.readVInt();
 availableMemory = in.readLong();
 int size = in.readVInt();
-cpus = new ObjectIntOpenHashMap<>(size);
+cpus = new ObjectIntHashMap<>(size);
 for (; size > 0; size--) {
     cpus.addTo(OsInfo.Cpu.readCpu(in), in.readVInt());
 }
@@ -496,21 +496,21 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 public static class JvmStats implements Streamable, ToXContent {
-    ObjectIntOpenHashMap<JvmVersion> versions;
+    ObjectIntHashMap<JvmVersion> versions;
     long threads;
     long maxUptime;
     long heapUsed;
     long heapMax;
     JvmStats() {
-        versions = new ObjectIntOpenHashMap<>();
+        versions = new ObjectIntHashMap<>();
         threads = 0;
         maxUptime = 0;
         heapMax = 0;
         heapUsed = 0;
     }
-    public ObjectIntOpenHashMap<JvmVersion> getVersions() {
+    public ObjectIntHashMap<JvmVersion> getVersions() {
         return versions;
     }
@@ -561,7 +561,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 @Override
 public void readFrom(StreamInput in) throws IOException {
     int size = in.readVInt();
-    versions = new ObjectIntOpenHashMap<>(size);
+    versions = new ObjectIntHashMap<>(size);
     for (; size > 0; size--) {
         versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
     }

View File

@@ -56,8 +56,8 @@ public class TransportClusterStatsAction extends TransportNodesOperationAction<C
 TransportClusterStatsAction.ClusterStatsNodeRequest, ClusterStatsNodeResponse> {
 private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store,
-        CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.FilterCache, CommonStatsFlags.Flag.IdCache,
-        CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments, CommonStatsFlags.Flag.Percolate);
+        CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.FilterCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments,
+        CommonStatsFlags.Flag.Percolate);
 private final NodeService nodeService;
 private final IndicesService indicesService;

View File

@@ -36,7 +36,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
 */
 public class AnalyzeRequest extends SingleCustomOperationRequest<AnalyzeRequest> {
-private String text;
+private String[] text;
 private String analyzer;
@@ -61,11 +61,11 @@ public class AnalyzeRequest extends SingleCustomOperationRequest<AnalyzeRequest>
     this.index(index);
 }
-public String text() {
+public String[] text() {
     return this.text;
 }
-public AnalyzeRequest text(String text) {
+public AnalyzeRequest text(String... text) {
     this.text = text;
     return this;
 }
@@ -118,7 +118,7 @@ public class AnalyzeRequest extends SingleCustomOperationRequest<AnalyzeRequest>
 @Override
 public ActionRequestValidationException validate() {
     ActionRequestValidationException validationException = super.validate();
-    if (text == null) {
+    if (text == null || text.length == 0) {
         validationException = addValidationError("text is missing", validationException);
     }
     if (tokenFilters == null) {
@@ -133,7 +133,7 @@ public class AnalyzeRequest extends SingleCustomOperationRequest<AnalyzeRequest>
 @Override
 public void readFrom(StreamInput in) throws IOException {
     super.readFrom(in);
-    text = in.readString();
+    text = in.readStringArray();
     analyzer = in.readOptionalString();
     tokenizer = in.readOptionalString();
     tokenFilters = in.readStringArray();
@@ -144,7 +144,7 @@ public class AnalyzeRequest extends SingleCustomOperationRequest<AnalyzeRequest>
 @Override
 public void writeTo(StreamOutput out) throws IOException {
     super.writeTo(out);
-    out.writeString(text);
+    out.writeStringArray(text);
     out.writeOptionalString(analyzer);
     out.writeOptionalString(tokenizer);
     out.writeStringArray(tokenFilters);

View File

@@ -30,7 +30,7 @@ public class AnalyzeRequestBuilder extends SingleCustomOperationRequestBuilder<A
     super(client, action, new AnalyzeRequest());
 }
-public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action, String index, String text) {
+public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action, String index, String... text) {
     super(client, action, new AnalyzeRequest(index).text(text));
 }
@@ -86,4 +86,12 @@ public class AnalyzeRequestBuilder extends SingleCustomOperationRequestBuilder<A
     request.charFilters(charFilters);
     return this;
 }
+
+/**
+ * Sets texts to analyze
+ */
+public AnalyzeRequestBuilder setText(String... texts) {
+    request.text(texts);
+    return this;
+}
 }
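
Putting the new request and builder pieces together, a hypothetical caller could look like this (the index name and texts are made up, and `client` is an assumed `org.elasticsearch.client.Client`); tokens from all values come back in one response, with positions and offsets adjusted across value boundaries by the transport action below:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;

AnalyzeResponse response = client.admin().indices()
        .prepareAnalyze("my_index", "this is a test")
        .setText("this is a test", "the second text")  // new String... setter
        .setAnalyzer("standard")
        .get();
for (AnalyzeResponse.AnalyzeToken token : response.getTokens()) {
    System.out.println(token.getTerm() + " @ position " + token.getPosition());
}
--------------------------------------------------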

View File

@@ -25,6 +25,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.support.ActionFilters;
@@ -210,37 +211,42 @@ public class TransportAnalyzeAction extends TransportSingleCustomOperationAction
 List<AnalyzeResponse.AnalyzeToken> tokens = Lists.newArrayList();
 TokenStream stream = null;
+int lastPosition = -1;
+int lastOffset = 0;
+for (String text : request.text()) {
     try {
-        stream = analyzer.tokenStream(field, request.text());
+        stream = analyzer.tokenStream(field, text);
         stream.reset();
         CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
         PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class);
         OffsetAttribute offset = stream.addAttribute(OffsetAttribute.class);
         TypeAttribute type = stream.addAttribute(TypeAttribute.class);
-        int position = -1;
         while (stream.incrementToken()) {
             int increment = posIncr.getPositionIncrement();
             if (increment > 0) {
-                position = position + increment;
+                lastPosition = lastPosition + increment;
             }
-            tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), position, offset.startOffset(), offset.endOffset(), type.type()));
+            tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), type.type()));
         }
         stream.end();
+        lastOffset += offset.endOffset();
+        lastPosition += posIncr.getPositionIncrement();
+        lastPosition += analyzer.getPositionIncrementGap(field);
+        lastOffset += analyzer.getOffsetGap(field);
     } catch (IOException e) {
         throw new ElasticsearchException("failed to analyze", e);
     } finally {
-        if (stream != null) {
-            try {
-                stream.close();
-            } catch (IOException e) {
-                // ignore
-            }
-        }
+        IOUtils.closeWhileHandlingException(stream);
     }
+}
 if (closeAnalyzer) {
     analyzer.close();
 }
 return new AnalyzeResponse(tokens);
 }

View File

@@ -33,7 +33,6 @@ public class ClearIndicesCacheRequest extends BroadcastOperationRequest<ClearInd
 private boolean filterCache = false;
 private boolean fieldDataCache = false;
-private boolean idCache = false;
 private boolean recycler = false;
 private boolean queryCache = false;
 private String[] fields = null;
@@ -82,10 +81,6 @@ public class ClearIndicesCacheRequest extends BroadcastOperationRequest<ClearInd
     return this.fields;
 }
-public boolean idCache() {
-    return this.idCache;
-}
 public ClearIndicesCacheRequest recycler(boolean recycler) {
     this.recycler = recycler;
     return this;
@@ -95,17 +90,11 @@ public class ClearIndicesCacheRequest extends BroadcastOperationRequest<ClearInd
     return this.recycler;
 }
-public ClearIndicesCacheRequest idCache(boolean idCache) {
-    this.idCache = idCache;
-    return this;
-}
 @Override
 public void readFrom(StreamInput in) throws IOException {
     super.readFrom(in);
     filterCache = in.readBoolean();
     fieldDataCache = in.readBoolean();
-    idCache = in.readBoolean();
     recycler = in.readBoolean();
     fields = in.readStringArray();
     queryCache = in.readBoolean();
@@ -116,7 +105,6 @@ public class ClearIndicesCacheRequest extends BroadcastOperationRequest<ClearInd
 super.writeTo(out);
 out.writeBoolean(filterCache);
 out.writeBoolean(fieldDataCache);
-out.writeBoolean(idCache);
 out.writeBoolean(recycler);
 out.writeStringArrayNullable(fields);
 out.writeBoolean(queryCache);

View File

@@ -51,8 +51,4 @@ public class ClearIndicesCacheRequestBuilder extends BroadcastOperationRequestBu
     return this;
 }
-public ClearIndicesCacheRequestBuilder setIdCache(boolean idCache) {
-    request.idCache(idCache);
-    return this;
-}
 }

View File

@@ -34,7 +34,6 @@ class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest {
 private boolean filterCache = false;
 private boolean fieldDataCache = false;
-private boolean idCache = false;
 private boolean recycler;
 private boolean queryCache = false;
@@ -47,7 +46,6 @@ class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest {
 super(shardId, request);
 filterCache = request.filterCache();
 fieldDataCache = request.fieldDataCache();
-idCache = request.idCache();
 fields = request.fields();
 recycler = request.recycler();
 queryCache = request.queryCache();
@@ -65,10 +63,6 @@ class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest {
     return this.fieldDataCache;
 }
-public boolean idCache() {
-    return this.idCache;
-}
 public boolean recycler() {
     return this.recycler;
 }
@@ -77,17 +71,11 @@ class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest {
     return this.fields;
 }
-public ShardClearIndicesCacheRequest waitForOperations(boolean waitForOperations) {
-    this.filterCache = waitForOperations;
-    return this;
-}
 @Override
 public void readFrom(StreamInput in) throws IOException {
     super.readFrom(in);
     filterCache = in.readBoolean();
     fieldDataCache = in.readBoolean();
-    idCache = in.readBoolean();
     recycler = in.readBoolean();
     fields = in.readStringArray();
     queryCache = in.readBoolean();
@@ -98,7 +86,6 @@ class ShardClearIndicesCacheRequest extends BroadcastShardOperationRequest {
 super.writeTo(out);
 out.writeBoolean(filterCache);
 out.writeBoolean(fieldDataCache);
-out.writeBoolean(idCache);
 out.writeBoolean(recycler);
 out.writeStringArrayNullable(fields);
 out.writeBoolean(queryCache);

View File

@@ -125,10 +125,6 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastOperatio
     clearedAtLeastOne = true;
     // cacheRecycler.clear();
 }
-if (request.idCache()) {
-    clearedAtLeastOne = true;
-    service.fieldData().clearField(ParentFieldMapper.NAME);
-}
 if (!clearedAtLeastOne) {
     if (request.fields() != null && request.fields().length > 0) {
         // only clear caches relating to the specified fields

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.action.admin.indices.mapping.put;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
@@ -51,7 +51,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
 */
 public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> implements IndicesRequest.Replaceable {
-private static ObjectOpenHashSet<String> RESERVED_FIELDS = ObjectOpenHashSet.from(
+private static ObjectHashSet<String> RESERVED_FIELDS = ObjectHashSet.from(
     "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index",
     "_size", "_timestamp", "_ttl"
 );

View File

@@ -27,7 +27,6 @@ import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.cache.filter.FilterCacheStats;
-import org.elasticsearch.index.cache.id.IdCacheStats;
 import org.elasticsearch.index.cache.query.QueryCacheStats;
 import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
@@ -92,9 +91,6 @@ public class CommonStats implements Streamable, ToXContent {
                 case FilterCache:
                     filterCache = new FilterCacheStats();
                     break;
-                case IdCache:
-                    idCache = new IdCacheStats();
-                    break;
                 case FieldData:
                     fieldData = new FieldDataStats();
                     break;
@@ -161,9 +157,6 @@ public class CommonStats implements Streamable, ToXContent {
                 case FilterCache:
                     filterCache = indexShard.filterCacheStats();
                     break;
-                case IdCache:
-                    idCache = indexShard.idCacheStats();
-                    break;
                 case FieldData:
                     fieldData = indexShard.fieldDataStats(flags.fieldDataFields());
                     break;
@@ -224,9 +217,6 @@ public class CommonStats implements Streamable, ToXContent {
     @Nullable
     public FilterCacheStats filterCache;
-    @Nullable
-    public IdCacheStats idCache;
     @Nullable
     public FieldDataStats fieldData;
@@ -333,15 +323,6 @@ public class CommonStats implements Streamable, ToXContent {
             filterCache.add(stats.getFilterCache());
         }
-        if (idCache == null) {
-            if (stats.getIdCache() != null) {
-                idCache = new IdCacheStats();
-                idCache.add(stats.getIdCache());
-            }
-        } else {
-            idCache.add(stats.getIdCache());
-        }
         if (fieldData == null) {
             if (stats.getFieldData() != null) {
                 fieldData = new FieldDataStats();
@@ -458,11 +439,6 @@ public class CommonStats implements Streamable, ToXContent {
         return this.filterCache;
     }
-    @Nullable
-    public IdCacheStats getIdCache() {
-        return this.idCache;
-    }
     @Nullable
     public FieldDataStats getFieldData() {
         return this.fieldData;
@@ -511,7 +487,7 @@ public class CommonStats implements Streamable, ToXContent {
     /**
      * Utility method which computes total memory by adding
-     * FieldData, IdCache, Percolate, Segments (memory, index writer, version map)
+     * FieldData, Percolate, Segments (memory, index writer, version map)
      */
     public ByteSizeValue getTotalMemory() {
         long size = 0;
@@ -521,9 +497,6 @@ public class CommonStats implements Streamable, ToXContent {
         if (this.getFilterCache() != null) {
             size += this.getFilterCache().getMemorySizeInBytes();
         }
-        if (this.getIdCache() != null) {
-            size += this.getIdCache().getMemorySizeInBytes();
-        }
         if (this.getPercolate() != null) {
             size += this.getPercolate().getMemorySizeInBytes();
         }
@@ -568,9 +541,6 @@ public class CommonStats implements Streamable, ToXContent {
         if (in.readBoolean()) {
             filterCache = FilterCacheStats.readFilterCacheStats(in);
         }
-        if (in.readBoolean()) {
-            idCache = IdCacheStats.readIdCacheStats(in);
-        }
         if (in.readBoolean()) {
             fieldData = FieldDataStats.readFieldDataStats(in);
         }
@@ -651,12 +621,6 @@ public class CommonStats implements Streamable, ToXContent {
             out.writeBoolean(true);
             filterCache.writeTo(out);
         }
-        if (idCache == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            idCache.writeTo(out);
-        }
         if (fieldData == null) {
             out.writeBoolean(false);
         } else {
@@ -720,9 +684,6 @@ public class CommonStats implements Streamable, ToXContent {
         if (filterCache != null) {
             filterCache.toXContent(builder, params);
         }
-        if (idCache != null) {
-            idCache.toXContent(builder, params);
-        }
         if (fieldData != null) {
             fieldData.toXContent(builder, params);
         }


@@ -216,7 +216,6 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         Flush("flush"),
         Refresh("refresh"),
         FilterCache("filter_cache"),
-        IdCache("id_cache"),
         FieldData("fielddata"),
         Docs("docs"),
         Warmer("warmer"),


@@ -175,15 +175,6 @@ public class IndicesStatsRequest extends BroadcastOperationRequest<IndicesStatsR
         return flags.isSet(Flag.FilterCache);
     }
-    public IndicesStatsRequest idCache(boolean idCache) {
-        flags.set(Flag.IdCache, idCache);
-        return this;
-    }
-    public boolean idCache() {
-        return flags.isSet(Flag.IdCache);
-    }
     public IndicesStatsRequest fieldData(boolean fieldData) {
         flags.set(Flag.FieldData, fieldData);
         return this;


@@ -117,11 +117,6 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder
         return this;
     }
-    public IndicesStatsRequestBuilder setIdCache(boolean idCache) {
-        request.idCache(idCache);
-        return this;
-    }
     public IndicesStatsRequestBuilder setFieldData(boolean fieldData) {
         request.fieldData(fieldData);
         return this;


@@ -161,9 +161,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastOperationActi
             if (request.request.filterCache()) {
                 flags.set(CommonStatsFlags.Flag.FilterCache);
             }
-            if (request.request.idCache()) {
-                flags.set(CommonStatsFlags.Flag.IdCache);
-            }
             if (request.request.fieldData()) {
                 flags.set(CommonStatsFlags.Flag.FieldData);
                 flags.fieldDataFields(request.request.fieldDataFields());


@@ -30,11 +30,9 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
 import java.io.IOException;
 import java.util.List;
@@ -227,13 +225,7 @@ public class PercolateRequest extends BroadcastOperationRequest<PercolateRequest
      * This is the preferred way to set the request body.
      */
    public PercolateRequest source(PercolateSourceBuilder sourceBuilder) {
-        try {
-            XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
-            sourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
-            this.source = builder.bytes();
-        } catch (Exception e) {
-            throw new SearchSourceBuilderException("Failed to build search source", e);
-        }
+        this.source = sourceBuilder.buildAsBytes(Requests.CONTENT_TYPE);
         return this;
     }


@@ -22,6 +22,7 @@ package org.elasticsearch.action.percolate;
 import com.google.common.collect.Lists;
 import org.elasticsearch.ElasticsearchGenerationException;
+import org.elasticsearch.action.support.ToXContentToBytes;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -44,7 +45,7 @@ import java.util.Map;
 /**
  * Builder to create the percolate request body.
  */
-public class PercolateSourceBuilder implements ToXContent {
+public class PercolateSourceBuilder extends ToXContentToBytes {
     private DocBuilder docBuilder;
     private QueryBuilder queryBuilder;


@@ -130,27 +130,26 @@ public class TransportSuggestAction extends TransportBroadcastOperationAction<Su
     protected ShardSuggestResponse shardOperation(ShardSuggestRequest request) {
         IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
         IndexShard indexShard = indexService.shardSafe(request.shardId().id());
-        final Engine.Searcher searcher = indexShard.acquireSearcher("suggest");
         ShardSuggestService shardSuggestService = indexShard.shardSuggestService();
         shardSuggestService.preSuggest();
         long startTime = System.nanoTime();
         XContentParser parser = null;
-        try {
+        try (Engine.Searcher searcher = indexShard.acquireSearcher("suggest")) {
             BytesReference suggest = request.suggest();
             if (suggest != null && suggest.length() > 0) {
                 parser = XContentFactory.xContent(suggest).createParser(suggest);
                 if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                     throw new IllegalArgumentException("suggest content missing");
                 }
-                final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), request.shardId().getIndex(), request.shardId().id());
-                final Suggest result = suggestPhase.execute(context, searcher.reader());
+                final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
+                        indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id());
+                final Suggest result = suggestPhase.execute(context, searcher.searcher());
                 return new ShardSuggestResponse(request.shardId(), result);
             }
             return new ShardSuggestResponse(request.shardId(), new Suggest());
         } catch (Throwable ex) {
             throw new ElasticsearchException("failed to execute suggest", ex);
         } finally {
-            searcher.close();
             if (parser != null) {
                 parser.close();
             }
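The hunk above replaces a manually closed searcher with try-with-resources, so the searcher is released on every exit path, including exceptions thrown before the finally block runs. A minimal sketch of the same pattern under an assumed AutoCloseable resource (DemoSearcher is an illustrative stand-in, not an Elasticsearch class):

[source,java]
--------------------------------------------------
// Illustrative stand-in for an AutoCloseable resource such as Engine.Searcher.
final class DemoSearcher implements AutoCloseable {
    void search() { /* run the query */ }

    @Override
    public void close() { /* release the underlying reader */ }
}

class TryWithResourcesDemo {
    static void run() {
        // The resource is closed automatically, even if search() throws,
        // which is what the diff achieves for the suggest searcher.
        try (DemoSearcher searcher = new DemoSearcher()) {
            searcher.search();
        }
    }
}
--------------------------------------------------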


@@ -19,18 +19,14 @@
 package org.elasticsearch.action.support;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
 import java.io.IOException;
-public class QuerySourceBuilder implements ToXContent {
+public class QuerySourceBuilder extends ToXContentToBytes {
     private QueryBuilder queryBuilder;
@@ -68,25 +64,4 @@ public class QuerySourceBuilder implements ToXContent {
             }
         }
     }
-    public BytesReference buildAsBytes(XContentType contentType) throws SearchSourceBuilderException {
-        try {
-            XContentBuilder builder = XContentFactory.contentBuilder(contentType);
-            toXContent(builder, ToXContent.EMPTY_PARAMS);
-            return builder.bytes();
-        } catch (Exception e) {
-            throw new SearchSourceBuilderException("Failed to build search source", e);
-        }
-    }
-    @Override
-    public String toString() {
-        try {
-            XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
-            toXContent(builder, ToXContent.EMPTY_PARAMS);
-            return builder.string();
-        } catch (Exception e) {
-            return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
-        }
-    }
 }


@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.action.support;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
+/**
+ * Base class for {@link ToXContent} implementation that also support conversion to {@link BytesReference} for serialization purposes
+ */
+public abstract class ToXContentToBytes implements ToXContent {
+    private final XContentType defaultType;
+    protected ToXContentToBytes() {
+        this.defaultType = Requests.CONTENT_TYPE;
+    }
+    protected ToXContentToBytes(XContentType defaultType) {
+        this.defaultType = defaultType;
+    }
+    /**
+     * Returns a {@link org.elasticsearch.common.bytes.BytesReference}
+     * containing the {@link ToXContent} output in binary format.
+     * Builds the request based on the default {@link XContentType}, either {@link Requests#CONTENT_TYPE} or provided as a constructor argument
+     */
+    public final BytesReference buildAsBytes() {
+        return buildAsBytes(defaultType);
+    }
+    /**
+     * Returns a {@link org.elasticsearch.common.bytes.BytesReference}
+     * containing the {@link ToXContent} output in binary format.
+     * Builds the request as the provided <code>contentType</code>
+     */
+    public final BytesReference buildAsBytes(XContentType contentType) {
+        try {
+            XContentBuilder builder = XContentFactory.contentBuilder(contentType);
+            toXContent(builder, ToXContent.EMPTY_PARAMS);
+            return builder.bytes();
+        } catch (Exception e) {
+            throw new ElasticsearchException("Failed to build ToXContent", e);
+        }
+    }
+    @Override
+    public final String toString() {
+        try {
+            XContentBuilder builder = XContentFactory.jsonBuilder();
+            builder.prettyPrint();
+            toXContent(builder, EMPTY_PARAMS);
+            return builder.string();
+        } catch (Exception e) {
+            return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
+        }
+    }
+}
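To illustrate how builders pick up serialization from this new base class (PercolateSourceBuilder and QuerySourceBuilder above), here is a hypothetical minimal subclass; it is not part of this commit. Only toXContent needs implementing, while buildAsBytes() and the pretty-printed toString() are inherited:

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.xcontent.XContentBuilder;

// Hypothetical builder, shown only to demonstrate the base class.
public class ExampleSourceBuilder extends ToXContentToBytes {
    private final String field;
    private final String value;

    public ExampleSourceBuilder(String field, String value) {
        this.field = field;
        this.value = value;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Subclasses only describe their content; the byte conversion
        // comes from ToXContentToBytes.buildAsBytes().
        return builder.startObject().field(field, value).endObject();
    }
}
--------------------------------------------------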


@@ -19,7 +19,7 @@
 package org.elasticsearch.action.termvectors;
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.cursors.ObjectLongCursor;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.PostingsEnum;
@@ -113,7 +113,7 @@ import static org.apache.lucene.util.ArrayUtil.grow;
 public final class TermVectorsFields extends Fields {
-    private final ObjectLongOpenHashMap<String> fieldMap;
+    private final ObjectLongHashMap<String> fieldMap;
     private final BytesReference termVectors;
     final boolean hasTermStatistic;
     final boolean hasFieldStatistic;
@@ -126,8 +126,7 @@ public final class TermVectorsFields extends Fields {
      */
    public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
         StreamInput header = StreamInput.wrap(headerRef.toBytesArray());
-        fieldMap = new ObjectLongOpenHashMap<>();
+        fieldMap = new ObjectLongHashMap<>();
         // here we read the header to fill the field offset map
         String headerString = header.readString();
         assert headerString.equals("TV");
@@ -170,10 +169,11 @@ public final class TermVectorsFields extends Fields {
     public Terms terms(String field) throws IOException {
         // first, find where in the termVectors bytes the actual term vector for
         // this field is stored
-        if (!fieldMap.containsKey(field)) {
+        final int keySlot = fieldMap.indexOf(field);
+        if (keySlot < 0) {
             return null; // we don't have it.
         }
-        long readOffset = fieldMap.lget();
+        long readOffset = fieldMap.indexGet(keySlot);
         return new TermVector(termVectors, readOffset);
     }
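HPPC 0.7 drops the containsKey()/lget() idiom the old code relied on; the slot-based indexOf()/indexGet() pair used above performs a single hash lookup for both the existence check and the read. A small sketch of the lookup pattern (the helper method is illustrative):

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.ObjectLongHashMap;

class HppcLookupDemo {
    // Returns the stored offset, or -1 when the field is absent.
    static long offsetOf(ObjectLongHashMap<String> fieldMap, String field) {
        final int keySlot = fieldMap.indexOf(field); // negative slot == missing key
        if (keySlot < 0) {
            return -1L;
        }
        return fieldMap.indexGet(keySlot); // reads the slot found above
    }
}
--------------------------------------------------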


@@ -587,6 +587,12 @@ public interface IndicesAdminClient extends ElasticsearchClient {
      */
     AnalyzeRequestBuilder prepareAnalyze(String text);
+    /**
+     * Analyze text/texts.
+     *
+     */
+    AnalyzeRequestBuilder prepareAnalyze();
     /**
      * Puts an index template.
      */


@@ -1478,6 +1478,11 @@ public abstract class AbstractClient extends AbstractComponent implements Client
         return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE, null, text);
     }
+    @Override
+    public AnalyzeRequestBuilder prepareAnalyze() {
+        return new AnalyzeRequestBuilder(this, AnalyzeAction.INSTANCE);
+    }
     @Override
     public ActionFuture<PutIndexTemplateResponse> putTemplate(final PutIndexTemplateRequest request) {
         return execute(PutIndexTemplateAction.INSTANCE, request);
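The new no-argument prepareAnalyze() lets callers populate the request entirely through the builder. A sketch of the intended call site; the setText/setAnalyzer setter names are assumed from this branch's AnalyzeRequestBuilder and should be verified against it:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.client.Client;

class AnalyzeDemo {
    static AnalyzeResponse analyze(Client client) {
        return client.admin().indices()
                .prepareAnalyze()               // no-arg variant added above
                .setText("quick brown fox")     // text supplied on the builder (assumed setter)
                .setAnalyzer("standard")        // assumed setter
                .get();
    }
}
--------------------------------------------------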


@@ -20,11 +20,13 @@
 package org.elasticsearch.cluster.metadata;
 import com.carrotsearch.hppc.ObjectArrayList;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.*;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.DiffableUtils.KeyedReader;
@@ -295,7 +297,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
         boolean matchAllAliases = matchAllAliases(aliases);
         ImmutableOpenMap.Builder<String, ImmutableList<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             List<AliasMetaData> filteredValues = Lists.newArrayList();
@@ -307,6 +309,13 @@
             }
             if (!filteredValues.isEmpty()) {
+                // Make the list order deterministic
+                CollectionUtil.timSort(filteredValues, new Comparator<AliasMetaData>() {
+                    @Override
+                    public int compare(AliasMetaData o1, AliasMetaData o2) {
+                        return o1.alias().compareTo(o2.alias());
+                    }
+                });
                 mapBuilder.put(index, ImmutableList.copyOf(filteredValues));
             }
         }
@@ -337,7 +346,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
             return false;
         }
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             List<AliasMetaData> filteredValues = Lists.newArrayList();
@@ -368,7 +377,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
         }
         ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexMapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             ImmutableOpenMap.Builder<String, MappingMetaData> filteredMappings;
@@ -400,7 +409,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
         final String[] warmers = Strings.isAllOrWildcard(uncheckedWarmers) ? Strings.EMPTY_ARRAY : uncheckedWarmers;
         ImmutableOpenMap.Builder<String, ImmutableList<IndexWarmersMetaData.Entry>> mapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             IndexWarmersMetaData indexWarmersMetaData = indexMetaData.custom(IndexWarmersMetaData.TYPE);


@@ -85,16 +85,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                 final int dash = autoExpandReplicas.indexOf('-');
                 if (-1 == dash) {
-                    logger.warn("Unexpected value [{}] for setting [{}]; it should be dash delimited",
-                            autoExpandReplicas, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS);
+                    logger.warn("failed to set [{}] for index [{}], it should be dash delimited [{}]",
+                            IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), autoExpandReplicas);
                     continue;
                 }
                 final String sMin = autoExpandReplicas.substring(0, dash);
                 try {
                     min = Integer.parseInt(sMin);
                 } catch (NumberFormatException e) {
-                    logger.warn("failed to set [{}], minimum value is not a number [{}]",
-                            e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, sMin);
+                    logger.warn("failed to set [{}] for index [{}], minimum value is not a number [{}]",
+                            e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), sMin);
                     continue;
                 }
                 String sMax = autoExpandReplicas.substring(dash + 1);
@@ -104,8 +104,8 @@
                     try {
                         max = Integer.parseInt(sMax);
                     } catch (NumberFormatException e) {
-                        logger.warn("failed to set [{}], maximum value is neither [{}] nor a number [{}]",
-                                e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, ALL_NODES_VALUE, sMax);
+                        logger.warn("failed to set [{}] for index [{}], maximum value is neither [{}] nor a number [{}]",
+                                e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), ALL_NODES_VALUE, sMax);
                         continue;
                     }
                 }


@@ -19,7 +19,7 @@
 package org.elasticsearch.cluster.node;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.collect.ImmutableList;
@@ -334,7 +334,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
             }
             return nodesIds;
         } else {
-            ObjectOpenHashSet<String> resolvedNodesIds = new ObjectOpenHashSet<>(nodesIds.length);
+            ObjectHashSet<String> resolvedNodesIds = new ObjectHashSet<>(nodesIds.length);
             for (String nodeId : nodesIds) {
                 if (nodeId.equals("_local")) {
                     String localNodeId = localNodeId();


@@ -25,6 +25,8 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Sets;
 import com.google.common.collect.UnmodifiableIterator;
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -35,11 +37,13 @@ import org.elasticsearch.index.shard.ShardId;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
-import static com.google.common.collect.Lists.newArrayList;
+import static com.google.common.collect.Lists.*;
 /**
  * The {@link IndexRoutingTable} represents routing information for a single
@@ -540,7 +544,26 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
     public String prettyPrint() {
         StringBuilder sb = new StringBuilder("-- index [" + index + "]\n");
+        List<IndexShardRoutingTable> ordered = new ArrayList<>();
         for (IndexShardRoutingTable indexShard : this) {
+            ordered.add(indexShard);
+        }
+        CollectionUtil.timSort(ordered, new Comparator<IndexShardRoutingTable>() {
+            @Override
+            public int compare(IndexShardRoutingTable o1, IndexShardRoutingTable o2) {
+                int v = o1.shardId().index().name().compareTo(
+                        o2.shardId().index().name());
+                if (v == 0) {
+                    v = Integer.compare(o1.shardId().id(),
+                            o2.shardId().id());
+                }
+                return v;
+            }
+        });
+        for (IndexShardRoutingTable indexShard : ordered) {
             sb.append("----shard_id [").append(indexShard.shardId().index().name()).append("][").append(indexShard.shardId().id()).append("]\n");
             for (ShardRouting shard : indexShard) {
                 sb.append("--------").append(shard.shortSummary()).append("\n");


@@ -19,7 +19,7 @@
 package org.elasticsearch.cluster.routing;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;
@@ -64,7 +64,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
     private Set<ShardId> clearPostAllocationFlag;
-    private final Map<String, ObjectIntOpenHashMap<String>> nodesPerAttributeNames = new HashMap<>();
+    private final Map<String, ObjectIntHashMap<String>> nodesPerAttributeNames = new HashMap<>();
     public RoutingNodes(ClusterState clusterState) {
         this.metaData = clusterState.metaData();
@@ -208,12 +208,12 @@
         return nodesToShards.get(nodeId);
     }
-    public ObjectIntOpenHashMap<String> nodesPerAttributesCounts(String attributeName) {
-        ObjectIntOpenHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
+    public ObjectIntHashMap<String> nodesPerAttributesCounts(String attributeName) {
+        ObjectIntHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
         if (nodesPerAttributesCounts != null) {
             return nodesPerAttributesCounts;
         }
-        nodesPerAttributesCounts = new ObjectIntOpenHashMap<>();
+        nodesPerAttributesCounts = new ObjectIntHashMap<>();
         for (RoutingNode routingNode : this) {
             String attrValue = routingNode.node().attributes().get(attributeName);
             nodesPerAttributesCounts.addTo(attrValue, 1);


@@ -19,7 +19,7 @@
 package org.elasticsearch.cluster.routing.allocation.decider;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.google.common.collect.Maps;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.MutableShardRouting;
@@ -182,10 +182,10 @@ public class AwarenessAllocationDecider extends AllocationDecider {
         }
         // build attr_value -> nodes map
-        ObjectIntOpenHashMap<String> nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute);
+        ObjectIntHashMap<String> nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute);
         // build the count of shards per attribute value
-        ObjectIntOpenHashMap<String> shardPerAttribute = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> shardPerAttribute = new ObjectIntHashMap<>();
         for (MutableShardRouting assignedShard : allocation.routingNodes().assignedShards(shardRouting)) {
             // if the shard is relocating, then make sure we count it as part of the node it is relocating to
             if (assignedShard.relocating()) {


@@ -20,7 +20,7 @@
 package org.elasticsearch.common;
 import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 /**
@@ -28,7 +28,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
  */
 public class ContextHolder {
-    private ObjectObjectOpenHashMap<Object, Object> context;
+    private ObjectObjectHashMap<Object, Object> context;
     /**
      * Attaches the given value to the context.
@@ -39,7 +39,7 @@ public class ContextHolder {
     @SuppressWarnings("unchecked")
     public final synchronized <V> V putInContext(Object key, Object value) {
         if (context == null) {
-            context = new ObjectObjectOpenHashMap<>(2);
+            context = new ObjectObjectHashMap<>(2);
         }
         return (V) context.put(key, value);
     }
@@ -52,7 +52,7 @@
             return;
         }
         if (context == null) {
-            context = new ObjectObjectOpenHashMap<>(map);
+            context = new ObjectObjectHashMap<>(map);
         } else {
             context.putAll(map);
         }
@@ -120,7 +120,7 @@
             return;
         }
         if (context == null) {
-            context = new ObjectObjectOpenHashMap<>(other.context);
+            context = new ObjectObjectHashMap<>(other.context);
         } else {
             context.putAll(other.context);
         }


@@ -19,9 +19,9 @@
 package org.elasticsearch.common.collect;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectLookupContainer;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import java.util.Iterator;
@@ -34,40 +34,50 @@ public final class HppcMaps {
     }
     /**
-     * Returns a new map with the given initial capacity
+     * Returns a new map with the given number of expected elements.
+     *
+     * @param expectedElements
+     *            The expected number of elements guaranteed not to cause buffer
+     *            expansion (inclusive).
      */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newMap(int capacity) {
-        return new ObjectObjectOpenHashMap<>(capacity);
+    public static <K, V> ObjectObjectHashMap<K, V> newMap(int expectedElements) {
+        return new ObjectObjectHashMap<>(expectedElements);
     }
     /**
-     * Returns a new map with a default initial capacity of
-     * {@value com.carrotsearch.hppc.HashContainerUtils#DEFAULT_CAPACITY}
+     * Returns a new map with a default initial capacity.
     */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newMap() {
+    public static <K, V> ObjectObjectHashMap<K, V> newMap() {
         return newMap(16);
     }
     /**
     * Returns a map like {@link #newMap()} that does not accept <code>null</code> keys
     */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newNoNullKeysMap() {
+    public static <K, V> ObjectObjectHashMap<K, V> newNoNullKeysMap() {
         return ensureNoNullKeys(16);
     }
     /**
     * Returns a map like {@link #newMap(int)} that does not accept <code>null</code> keys
+     *
+     * @param expectedElements
+     *            The expected number of elements guaranteed not to cause buffer
+     *            expansion (inclusive).
     */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newNoNullKeysMap(int capacity) {
-        return ensureNoNullKeys(capacity);
+    public static <K, V> ObjectObjectHashMap<K, V> newNoNullKeysMap(int expectedElements) {
+        return ensureNoNullKeys(expectedElements);
     }
     /**
     * Wraps the given map and prevent adding of <code>null</code> keys.
+     *
+     * @param expectedElements
+     *            The expected number of elements guaranteed not to cause buffer
+     *            expansion (inclusive).
     */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> ensureNoNullKeys(int capacity) {
-        return new ObjectObjectOpenHashMap<K, V>(capacity) {
+    public static <K, V> ObjectObjectHashMap<K, V> ensureNoNullKeys(int expectedElements) {
+        return new ObjectObjectHashMap<K, V>(expectedElements) {
            @Override
            public V put(K key, V value) {
                if (key == null) {
@@ -75,12 +85,11 @@ public final class HppcMaps {
                }
                return super.put(key, value);
            }
        };
    }
    /**
-     * @return an intersection view over the two specified containers (which can be KeyContainer or ObjectOpenHashSet).
+     * @return an intersection view over the two specified containers (which can be KeyContainer or ObjectHashSet).
     */
    // Hppc has forEach, but this means we need to build an intermediate set, with this method we just iterate
    // over each unique value without creating a third set.
@@ -124,12 +133,9 @@ public final class HppcMaps {
    }
    public final static class Object {
        public final static class Integer {
-            public static <V> ObjectIntOpenHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
-                return new ObjectIntOpenHashMap<V>(capacity, loadFactor) {
+            public static <V> ObjectIntHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
+                return new ObjectIntHashMap<V>(capacity, loadFactor) {
                    @Override
                    public int put(V key, int value) {
                        if (key == null) {
@@ -139,9 +145,6 @@ public final class HppcMaps {
                    }
                };
            }
        }
    }
 }
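A short usage sketch for the null-rejecting factory above; with hppc 0.7 the argument is an expected element count rather than a raw capacity, and the anonymous subclass rejects null keys on put (the demo class name is illustrative):

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.elasticsearch.common.collect.HppcMaps;

class NoNullKeysDemo {
    public static void main(String[] args) {
        // Sized for roughly 8 elements without buffer expansion.
        ObjectObjectHashMap<String, String> map = HppcMaps.newNoNullKeysMap(8);
        map.put("field", "value");  // accepted
        map.put(null, "oops");      // throws: the put() override rejects null keys
    }
}
--------------------------------------------------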


@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.IntCursor;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.predicates.IntObjectPredicate;
 import com.carrotsearch.hppc.predicates.IntPredicate;
 import com.carrotsearch.hppc.procedures.IntObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;
@@ -38,9 +39,9 @@ import java.util.Map;
  */
 public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCursor<VType>> {
-    private final IntObjectOpenHashMap<VType> map;
-    private ImmutableOpenIntMap(IntObjectOpenHashMap<VType> map) {
+    private final IntObjectHashMap<VType> map;
+    private ImmutableOpenIntMap(IntObjectHashMap<VType> map) {
         this.map = map;
     }
@@ -175,7 +176,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
     }
     @SuppressWarnings("unchecked")
-    private static final ImmutableOpenIntMap EMPTY = new ImmutableOpenIntMap(new IntObjectOpenHashMap());
+    private static final ImmutableOpenIntMap EMPTY = new ImmutableOpenIntMap(new IntObjectHashMap());
     @SuppressWarnings("unchecked")
     public static <VType> ImmutableOpenIntMap<VType> of() {
@@ -196,7 +197,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
     public static class Builder<VType> implements IntObjectMap<VType> {
-        private IntObjectOpenHashMap<VType> map;
+        private IntObjectHashMap<VType> map;
         public Builder() {
             //noinspection unchecked
@@ -204,7 +205,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
         }
         public Builder(int size) {
-            this.map = new IntObjectOpenHashMap<>(size);
+            this.map = new IntObjectHashMap<>(size);
         }
         public Builder(ImmutableOpenIntMap<VType> map) {
@@ -215,7 +216,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
         * Builds a new instance of the
         */
        public ImmutableOpenIntMap<VType> build() {
-            IntObjectOpenHashMap<VType> map = this.map;
+            IntObjectHashMap<VType> map = this.map;
            this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
            return new ImmutableOpenIntMap<>(map);
        }
@@ -325,5 +326,50 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
        public ObjectContainer<VType> values() {
            return map.values();
        }
+        @Override
+        public int removeAll(IntObjectPredicate<? super VType> predicate) {
+            return map.removeAll(predicate);
+        }
+        @Override
+        public <T extends IntObjectPredicate<? super VType>> T forEach(T predicate) {
+            return map.forEach(predicate);
+        }
+        @Override
+        public int indexOf(int key) {
+            return map.indexOf(key);
+        }
+        @Override
+        public boolean indexExists(int index) {
+            return map.indexExists(index);
+        }
+        @Override
+        public VType indexGet(int index) {
+            return map.indexGet(index);
+        }
+        @Override
+        public VType indexReplace(int index, VType newValue) {
+            return map.indexReplace(index, newValue);
+        }
+        @Override
+        public void indexInsert(int index, int key, VType value) {
+            map.indexInsert(index, key, value);
+        }
+        @Override
+        public void release() {
+            map.release();
+        }
+        @Override
+        public String visualizeKeyDistribution(int characters) {
+            return map.visualizeKeyDistribution(characters);
+        }
    }
 }


@@ -23,6 +23,8 @@ import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.LongCursor;
 import com.carrotsearch.hppc.cursors.LongObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.predicates.IntObjectPredicate;
+import com.carrotsearch.hppc.predicates.LongObjectPredicate;
 import com.carrotsearch.hppc.predicates.LongPredicate;
 import com.carrotsearch.hppc.procedures.LongObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;
@@ -38,9 +40,9 @@ import java.util.Map;
  */
 public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCursor<VType>> {
-    private final LongObjectOpenHashMap<VType> map;
-    private ImmutableOpenLongMap(LongObjectOpenHashMap<VType> map) {
+    private final LongObjectHashMap<VType> map;
+    private ImmutableOpenLongMap(LongObjectHashMap<VType> map) {
         this.map = map;
     }
@@ -175,7 +177,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
     }
     @SuppressWarnings("unchecked")
-    private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectOpenHashMap());
+    private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectHashMap());
     @SuppressWarnings("unchecked")
     public static <VType> ImmutableOpenLongMap<VType> of() {
@@ -196,7 +198,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
     public static class Builder<VType> implements LongObjectMap<VType> {
-        private LongObjectOpenHashMap<VType> map;
+        private LongObjectHashMap<VType> map;
         public Builder() {
             //noinspection unchecked
@@ -204,7 +206,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
         }
         public Builder(int size) {
-            this.map = new LongObjectOpenHashMap<>(size);
+            this.map = new LongObjectHashMap<>(size);
         }
         public Builder(ImmutableOpenLongMap<VType> map) {
@@ -215,7 +217,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
         * Builds a new instance of the
         */
        public ImmutableOpenLongMap<VType> build() {
-            LongObjectOpenHashMap<VType> map = this.map;
+            LongObjectHashMap<VType> map = this.map;
            this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
            return new ImmutableOpenLongMap<>(map);
        }
@@ -311,11 +313,6 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
            return map.removeAll(predicate);
        }
-        @Override
-        public <T extends LongObjectProcedure<? super VType>> T forEach(T procedure) {
-            return map.forEach(procedure);
-        }
        @Override
        public LongCollection keys() {
            return map.keys();
@@ -325,5 +322,55 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
        public ObjectContainer<VType> values() {
            return map.values();
        }
+        @Override
+        public <T extends LongObjectProcedure<? super VType>> T forEach(T procedure) {
+            return map.forEach(procedure);
+        }
+        @Override
+        public int indexOf(long key) {
+            return map.indexOf(key);
+        }
+        @Override
+        public boolean indexExists(int index) {
+            return map.indexExists(index);
+        }
+        @Override
+        public VType indexGet(int index) {
+            return map.indexGet(index);
+        }
+        @Override
+        public VType indexReplace(int index, VType newValue) {
+            return map.indexReplace(index, newValue);
+        }
+        @Override
+        public void indexInsert(int index, long key, VType value) {
+            map.indexInsert(index, key, value);
+        }
+        @Override
+        public void release() {
+            map.release();
+        }
+        @Override
+        public String visualizeKeyDistribution(int characters) {
+            return map.visualizeKeyDistribution(characters);
+        }
+        @Override
+        public int removeAll(LongObjectPredicate<? super VType> predicate) {
+            return map.removeAll(predicate);
+        }
+        @Override
+        public <T extends LongObjectPredicate<? super VType>> T forEach(T predicate) {
+            return map.forEach(predicate);
+        }
    }
 }


@@ -22,6 +22,7 @@ package org.elasticsearch.common.collect;
 import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import com.carrotsearch.hppc.predicates.ObjectObjectPredicate;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.carrotsearch.hppc.procedures.ObjectObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;
@@ -37,9 +38,9 @@ import java.util.Map;
  */
 public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObjectCursor<KType, VType>> {
-    private final ObjectObjectOpenHashMap<KType, VType> map;
-    private ImmutableOpenMap(ObjectObjectOpenHashMap<KType, VType> map) {
+    private final ObjectObjectHashMap<KType, VType> map;
+    private ImmutableOpenMap(ObjectObjectHashMap<KType, VType> map) {
         this.map = map;
     }
@@ -182,7 +183,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
     }
     @SuppressWarnings("unchecked")
-    private static final ImmutableOpenMap EMPTY = new ImmutableOpenMap(new ObjectObjectOpenHashMap());
+    private static final ImmutableOpenMap EMPTY = new ImmutableOpenMap(new ObjectObjectHashMap());
     @SuppressWarnings("unchecked")
     public static <KType, VType> ImmutableOpenMap<KType, VType> of() {
@@ -211,8 +212,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
     }
     public static class Builder<KType, VType> implements ObjectObjectMap<KType, VType> {
-        private ObjectObjectOpenHashMap<KType, VType> map;
+        private ObjectObjectHashMap<KType, VType> map;
         public Builder() {
             //noinspection unchecked
@@ -220,7 +220,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
         }
         public Builder(int size) {
-            this.map = new ObjectObjectOpenHashMap<>(size);
+            this.map = new ObjectObjectHashMap<>(size);
         }
         public Builder(ImmutableOpenMap<KType, VType> map) {
@@ -231,11 +231,13 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
         * Builds a new instance of the
         */
        public ImmutableOpenMap<KType, VType> build() {
-            ObjectObjectOpenHashMap<KType, VType> map = this.map;
+            ObjectObjectHashMap<KType, VType> map = this.map;
            this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
            return new ImmutableOpenMap<>(map);
        }
        /**
         * Puts all the entries in the map to the builder.
         */
@@ -313,7 +315,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
        }
        @Override
-        public int removeAll(ObjectContainer<? extends KType> container) {
+        public int removeAll(ObjectContainer<? super KType> container) {
            return map.removeAll(container);
        }
@@ -347,5 +349,49 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
            return (Builder) this;
        }
+        @Override
+        public int removeAll(ObjectObjectPredicate<? super KType, ? super VType> predicate) {
+            return map.removeAll(predicate);
+        }
+        @Override
+        public <T extends ObjectObjectPredicate<? super KType, ? super VType>> T forEach(T predicate) {
+            return map.forEach(predicate);
+        }
+        @Override
+        public int indexOf(KType key) {
+            return map.indexOf(key);
+        }
+        @Override
+        public boolean indexExists(int index) {
+            return map.indexExists(index);
+        }
+        @Override
+        public VType indexGet(int index) {
+            return map.indexGet(index);
+        }
+        @Override
+        public VType indexReplace(int index, VType newValue) {
+            return map.indexReplace(index, newValue);
+        }
+        @Override
+        public void indexInsert(int index, KType key, VType value) {
+            map.indexInsert(index, key, value);
+        }
+        @Override
+        public void release() {
+            map.release();
+        }
+        @Override
+        public String visualizeKeyDistribution(int characters) {
+            return map.visualizeKeyDistribution(characters);
+        }
    }
 }
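For context, the Builder is the only mutable view of an ImmutableOpenMap; build() hands the backing map over and deliberately disables the builder afterwards. A minimal usage sketch with illustrative keys:

[source,java]
--------------------------------------------------
import org.elasticsearch.common.collect.ImmutableOpenMap;

class ImmutableOpenMapDemo {
    public static void main(String[] args) {
        ImmutableOpenMap.Builder<String, Integer> builder = ImmutableOpenMap.builder();
        builder.put("shards", 5);
        builder.put("replicas", 1);
        ImmutableOpenMap<String, Integer> map = builder.build(); // builder is now unusable
        System.out.println(map.get("shards")); // 5
    }
}
--------------------------------------------------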


@@ -409,17 +409,22 @@ public class GeoUtils {
             return point.reset(lat, lon);
         } else if(parser.currentToken() == Token.VALUE_STRING) {
             String data = parser.text();
+            return parseGeoPoint(data, point);
+        } else {
+            throw new ElasticsearchParseException("geo_point expected");
+        }
+    }
+    /** parse a {@link GeoPoint} from a String */
+    public static GeoPoint parseGeoPoint(String data, GeoPoint point) {
         int comma = data.indexOf(',');
         if(comma > 0) {
-            lat = Double.parseDouble(data.substring(0, comma).trim());
-            lon = Double.parseDouble(data.substring(comma + 1).trim());
+            double lat = Double.parseDouble(data.substring(0, comma).trim());
+            double lon = Double.parseDouble(data.substring(comma + 1).trim());
             return point.reset(lat, lon);
         } else {
             return point.resetFromGeoHash(data);
         }
-        } else {
-            throw new ElasticsearchParseException("geo_point expected");
-        }
     }
     private GeoUtils() {
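The extraction of parseGeoPoint(String, GeoPoint) makes the string form callable without an XContentParser: a "lat,lon" string takes the comma branch, anything else is treated as a geohash. A usage sketch (the sample coordinates and geohash are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;

class GeoPointParseDemo {
    public static void main(String[] args) {
        GeoPoint point = new GeoPoint();

        GeoUtils.parseGeoPoint("41.12,-71.34", point);  // "lat,lon" branch
        System.out.println(point.lat() + " / " + point.lon());

        GeoUtils.parseGeoPoint("drm3btev3e86", point);  // geohash branch
        System.out.println(point.lat() + " / " + point.lon());
    }
}
--------------------------------------------------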


@@ -1,216 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common.lucene.docset;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.util.ArrayUtil;
-import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.InPlaceMergeSorter;
-import org.apache.lucene.util.RamUsageEstimator;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-/**
- *
- */
-public class AndDocIdSet extends DocIdSet {
-    private final DocIdSet[] sets;
-    public AndDocIdSet(DocIdSet[] sets) {
-        this.sets = sets;
-    }
-    @Override
-    public boolean isCacheable() {
-        for (DocIdSet set : sets) {
-            if (!set.isCacheable()) {
-                return false;
-            }
-        }
-        return true;
-    }
-    @Override
-    public long ramBytesUsed() {
-        long ramBytesUsed = RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
-        for (DocIdSet set : sets) {
-            ramBytesUsed += RamUsageEstimator.NUM_BYTES_OBJECT_REF + set.ramBytesUsed();
-        }
-        return ramBytesUsed;
-    }
-    @Override
-    public Bits bits() throws IOException {
-        Bits[] bits = new Bits[sets.length];
-        for (int i = 0; i < sets.length; i++) {
-            bits[i] = sets[i].bits();
-            if (bits[i] == null) {
-                return null;
-            }
-        }
-        return new AndBits(bits);
-    }
-    @Override
-    public DocIdSetIterator iterator() throws IOException {
-        // we try and be smart here, if we can iterate through docsets quickly, prefer to iterate
-        // over them as much as possible, before actually going to "bits" based ones to check
-        List<DocIdSetIterator> iterators = new ArrayList<>(sets.length);
-        List<Bits> bits = new ArrayList<>(sets.length);
-        for (DocIdSet set : sets) {
-            if (DocIdSets.isEmpty(set)) {
-                return DocIdSetIterator.empty();
-            }
-            DocIdSetIterator it = set.iterator();
-            if (it == null) {
-                return DocIdSetIterator.empty();
-            }
-            Bits bit = set.bits();
-            if (bit != null && bit instanceof BitSet == false) {
-                bits.add(bit);
-            } else {
-                iterators.add(it);
-            }
-        }
-        if (bits.isEmpty()) {
-            return IteratorBasedIterator.newDocIdSetIterator(iterators);
-        }
-        if (iterators.isEmpty()) {
-            return new BitsDocIdSetIterator(new AndBits(bits.toArray(new Bits[bits.size()])));
-        }
-        // combination of both..., first iterating over the "fast" ones, and then checking on the more
-        // expensive ones
-        return new BitsDocIdSetIterator.FilteredIterator(
-                IteratorBasedIterator.newDocIdSetIterator(iterators),
-                new AndBits(bits.toArray(new Bits[bits.size()]))
-        );
-    }
-    /** A conjunction between several {@link Bits} instances with short-circuit logic. */
-    public static class AndBits implements Bits {
-        private final Bits[] bits;
-        public AndBits(Bits[] bits) {
-            this.bits = bits;
-        }
-        @Override
-        public boolean get(int index) {
-            for (Bits bit : bits) {
-                if (!bit.get(index)) {
-                    return false;
-                }
-            }
-            return true;
-        }
-        @Override
-        public int length() {
-            return bits[0].length();
-        }
-    }
-    static class IteratorBasedIterator extends DocIdSetIterator {
-        private int doc = -1;
-        private final DocIdSetIterator lead;
-        private final DocIdSetIterator[] otherIterators;
-        public static DocIdSetIterator newDocIdSetIterator(Collection<DocIdSetIterator> iterators) throws IOException {
-            if (iterators.isEmpty()) {
-                return DocIdSetIterator.empty();
-            }
-            if (iterators.size() == 1) {
-                // shortcut if there is only one valid iterator.
-                return iterators.iterator().next();
-            }
-            return new IteratorBasedIterator(iterators);
-        }
-        private IteratorBasedIterator(Collection<DocIdSetIterator> iterators) throws IOException {
final DocIdSetIterator[] sortedIterators = iterators.toArray(new DocIdSetIterator[iterators.size()]);
new InPlaceMergeSorter() {
@Override
protected int compare(int i, int j) {
return Long.compare(sortedIterators[i].cost(), sortedIterators[j].cost());
}
@Override
protected void swap(int i, int j) {
ArrayUtil.swap(sortedIterators, i, j);
}
}.sort(0, sortedIterators.length);
lead = sortedIterators[0];
this.otherIterators = Arrays.copyOfRange(sortedIterators, 1, sortedIterators.length);
}
@Override
public final int docID() {
return doc;
}
@Override
public final int nextDoc() throws IOException {
doc = lead.nextDoc();
return doNext();
}
@Override
public final int advance(int target) throws IOException {
doc = lead.advance(target);
return doNext();
}
private int doNext() throws IOException {
main:
while (true) {
for (DocIdSetIterator otherIterator : otherIterators) {
// the following assert is the invariant of the loop
assert otherIterator.docID() <= doc;
// the current doc might already be equal to doc if it broke the loop
// at the previous iteration
if (otherIterator.docID() < doc) {
final int advanced = otherIterator.advance(doc);
if (advanced > doc) {
doc = lead.advance(advanced);
continue main;
}
}
}
return doc;
}
}
@Override
public long cost() {
return lead.cost();
}
}
}

View File

@@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.docset;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.util.Bits;
/**
* A {@link Bits} based iterator.
*/
public class BitsDocIdSetIterator extends MatchDocIdSetIterator {
private final Bits bits;
public BitsDocIdSetIterator(Bits bits) {
super(bits.length());
this.bits = bits;
}
public BitsDocIdSetIterator(int maxDoc, Bits bits) {
super(maxDoc);
this.bits = bits;
}
@Override
protected boolean matchDoc(int doc) {
return bits.get(doc);
}
public static class FilteredIterator extends FilteredDocIdSetIterator {
private final Bits bits;
FilteredIterator(DocIdSetIterator innerIter, Bits bits) {
super(innerIter);
this.bits = bits;
}
@Override
protected boolean match(int doc) {
return bits.get(doc);
}
}
@Override
public long cost() {
return this.bits.length();
}
}

View File

@@ -19,17 +19,11 @@
 package org.elasticsearch.common.lucene.docset;

-import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TwoPhaseIterator;
-import org.apache.lucene.util.BitDocIdSet;
-import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.lucene.util.RoaringDocIdSet;
-import org.apache.lucene.util.SparseFixedBitSet;
 import org.elasticsearch.common.Nullable;

 import java.io.IOException;
@@ -38,13 +32,6 @@ import java.io.IOException;
  */
 public class DocIdSets {

-    /**
-     * Return the size of the doc id set, plus a reference to it.
-     */
-    public static long sizeInBytes(DocIdSet docIdSet) {
-        return RamUsageEstimator.NUM_BYTES_OBJECT_REF + docIdSet.ramBytesUsed();
-    }
-
     /**
      * Is it an empty {@link DocIdSet}?
      */
@@ -52,59 +39,6 @@ public class DocIdSets {
         return set == null || set == DocIdSet.EMPTY;
     }

-    /**
-     * Converts to a cacheable {@link DocIdSet}
-     * <p/>
-     * This never returns <code>null</code>.
-     */
-    public static DocIdSet toCacheable(LeafReader reader, @Nullable DocIdSet set) throws IOException {
-        if (set == null || set == DocIdSet.EMPTY) {
-            return DocIdSet.EMPTY;
-        }
-        final DocIdSetIterator it = set.iterator();
-        if (it == null) {
-            return DocIdSet.EMPTY;
-        }
-        final int firstDoc = it.nextDoc();
-        if (firstDoc == DocIdSetIterator.NO_MORE_DOCS) {
-            return DocIdSet.EMPTY;
-        }
-        if (set instanceof BitDocIdSet) {
-            return set;
-        }
-        final RoaringDocIdSet.Builder builder = new RoaringDocIdSet.Builder(reader.maxDoc());
-        builder.add(firstDoc);
-        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
-            builder.add(doc);
-        }
-        return builder.build();
-    }
-
-    /**
-     * Get a build a {@link Bits} instance that will match all documents
-     * contained in {@code set}. Note that this is a potentially heavy
-     * operation as this might require to consume an iterator of this set
-     * entirely and to load it into a {@link BitSet}. Prefer using
-     * {@link #asSequentialAccessBits} if you only need to consume the
-     * {@link Bits} once and in order.
-     */
-    public static Bits toSafeBits(int maxDoc, @Nullable DocIdSet set) throws IOException {
-        if (set == null) {
-            return new Bits.MatchNoBits(maxDoc);
-        }
-        Bits bits = set.bits();
-        if (bits != null) {
-            return bits;
-        }
-        DocIdSetIterator iterator = set.iterator();
-        if (iterator == null) {
-            return new Bits.MatchNoBits(maxDoc);
-        }
-        return toBitSet(iterator, maxDoc);
-    }
-
     /**
      * Given a {@link Scorer}, return a {@link Bits} instance that will match
      * all documents contained in the set. Note that the returned {@link Bits}
@@ -168,18 +102,4 @@ public class DocIdSets {
         };
     }

-    /**
-     * Creates a {@link BitSet} from an iterator.
-     */
-    public static BitSet toBitSet(DocIdSetIterator iterator, int numBits) throws IOException {
-        BitDocIdSet.Builder builder = new BitDocIdSet.Builder(numBits);
-        builder.or(iterator);
-        BitDocIdSet result = builder.build();
-        if (result != null) {
-            return result.bits();
-        } else {
-            return new SparseFixedBitSet(numBits);
-        }
-    }
 }

View File

@@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.docset;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
*/
public abstract class MatchDocIdSetIterator extends DocIdSetIterator {
private final int maxDoc;
private int doc = -1;
public MatchDocIdSetIterator(int maxDoc) {
this.maxDoc = maxDoc;
}
protected abstract boolean matchDoc(int doc);
@Override
public int docID() {
return doc;
}
@Override
public int nextDoc() throws IOException {
do {
doc++;
if (doc >= maxDoc) {
return doc = NO_MORE_DOCS;
}
} while (!matchDoc(doc));
return doc;
}
@Override
public int advance(int target) throws IOException {
if (target >= maxDoc) {
return doc = NO_MORE_DOCS;
}
doc = target;
while (!matchDoc(doc)) {
doc++;
if (doc >= maxDoc) {
return doc = NO_MORE_DOCS;
}
}
return doc;
}
}

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.common.lucene.search;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
@@ -149,7 +149,7 @@ public class MultiPhrasePrefixQuery extends Query {
         }
         Term[] suffixTerms = termArrays.get(sizeMinus1);
         int position = positions.get(sizeMinus1);
-        ObjectOpenHashSet<Term> terms = new ObjectOpenHashSet<>();
+        ObjectHashSet<Term> terms = new ObjectHashSet<>();
         for (Term term : suffixTerms) {
             getPrefixTerms(terms, term, reader);
             if (terms.size() > maxExpansions) {
@@ -163,7 +163,7 @@ public class MultiPhrasePrefixQuery extends Query {
         return query.rewrite(reader);
     }

-    private void getPrefixTerms(ObjectOpenHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
+    private void getPrefixTerms(ObjectHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
         // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
         // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
         List<LeafReaderContext> leaves = reader.leaves();

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.common.recycler;

-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
 import com.google.common.collect.Queues;
 import org.elasticsearch.ElasticsearchException;
@@ -173,7 +173,7 @@ public enum Recyclers {
         final int slot() {
             final long id = Thread.currentThread().getId();
             // don't trust Thread.hashCode to have equiprobable low bits
-            int slot = (int) MurmurHash3.hash(id);
+            int slot = (int) BitMixer.mix64(id);
             // make positive, otherwise % may return negative numbers
             slot &= 0x7FFFFFFF;
             slot %= concurrencyLevel;
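HPPC 0.7 removed the `MurmurHash3` helper in favor of `BitMixer`, which this and the following hunks adopt. A standalone sketch of the slot-selection idiom above (the slot count of 16 is hypothetical):

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.BitMixer;

// Thread ids are sequential, so their low bits are poor hash input;
// mix64 avalanches them before the modulo picks a slot.
final int concurrencyLevel = 16; // hypothetical
int slot = ((int) BitMixer.mix64(Thread.currentThread().getId()) & 0x7FFFFFFF) % concurrencyLevel;
--------------------------------------------------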

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.common.util;

-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
 import com.google.common.base.Preconditions;
 import org.elasticsearch.common.lease.Releasable;
@@ -35,7 +35,7 @@ abstract class AbstractPagedHashMap implements Releasable {
     static long hash(long value) {
         // Don't use the value directly. Under some cases eg dates, it could be that the low bits don't carry much value and we would like
         // all bits of the hash to carry as much value
-        return MurmurHash3.hash(value);
+        return BitMixer.mix64(value);
     }

     static long hash(double value) {

View File

@@ -19,7 +19,8 @@
 package org.elasticsearch.common.util;

-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
+
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
@@ -56,7 +57,7 @@ public final class BytesRefHash extends AbstractHash {
     // BytesRef has a weak hashCode function so we try to improve it by rehashing using Murmur3
     // Feel free to remove rehashing if BytesRef gets a better hash function
     private static int rehash(int hash) {
-        return MurmurHash3.hash(hash);
+        return BitMixer.mix32(hash);
     }

     /**

View File

@@ -19,8 +19,8 @@
 package org.elasticsearch.gateway;

-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.FailedNodeException;
@@ -68,7 +68,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
     }

     public void performStateRecovery(final GatewayStateRecoveredListener listener) throws GatewayException {
-        ObjectOpenHashSet<String> nodesIds = ObjectOpenHashSet.from(clusterService.state().nodes().masterNodes().keys());
+        ObjectHashSet<String> nodesIds = new ObjectHashSet<>(clusterService.state().nodes().masterNodes().keys());
         logger.trace("performing state recovery from {}", nodesIds);
         TransportNodesListGatewayMetaState.NodesGatewayMetaState nodesState = listGatewayMetaState.list(nodesIds.toArray(String.class), null).actionGet();
@@ -104,7 +104,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
             }
         }

-        ObjectFloatOpenHashMap<String> indices = new ObjectFloatOpenHashMap<>();
+        ObjectFloatHashMap<String> indices = new ObjectFloatHashMap<>();
         MetaData electedGlobalState = null;
         int found = 0;
         for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {
@@ -127,10 +127,11 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
         }
         // update the global state, and clean the indices, we elect them in the next phase
        MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState).removeAllIndices();
-        final boolean[] states = indices.allocated;
+        assert !indices.containsKey(null);
         final Object[] keys = indices.keys;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
+        for (int i = 0; i < keys.length; i++) {
+            if (keys[i] != null) {
                 String index = (String) keys[i];
                 IndexMetaData electedIndexMetaData = null;
                 int indexMetaDataCount = 0;
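HPPC 0.7 also dropped the public `allocated` array; as the hunk above shows, the replacement idiom scans the `keys` array directly and treats a null key as an empty slot, hence the new `containsKey(null)` assertion. A self-contained sketch of the pattern (map contents are illustrative):

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.ObjectFloatHashMap;

ObjectFloatHashMap<String> indices = new ObjectFloatHashMap<>();
indices.addTo("twitter", 1.0f);
indices.addTo("logs", 2.0f);

// A null key would be indistinguishable from an empty slot, so the map
// must never contain one when iterating this way.
final Object[] keys = indices.keys;
final float[] values = indices.values;
for (int i = 0; i < keys.length; i++) {
    if (keys[i] != null) {
        System.out.println(keys[i] + " -> " + values[i]);
    }
}
--------------------------------------------------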

View File

@@ -19,8 +19,8 @@
 package org.elasticsearch.gateway;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectLongHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.google.common.collect.Maps;
@@ -68,7 +68,7 @@ public class GatewayAllocator extends AbstractComponent {

     private final ConcurrentMap<ShardId, Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData>> cachedStores = ConcurrentCollections.newConcurrentMap();

-    private final ConcurrentMap<ShardId, ObjectLongOpenHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();
+    private final ConcurrentMap<ShardId, ObjectLongHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();

     private final TimeValue listTimeout;
@@ -121,16 +121,17 @@ public class GatewayAllocator extends AbstractComponent {
                 continue;
             }

-            ObjectLongOpenHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard, metaData.index(shard.index()));
+            ObjectLongHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard, metaData.index(shard.index()));

             int numberOfAllocationsFound = 0;
             long highestVersion = -1;
             Set<DiscoveryNode> nodesWithHighestVersion = Sets.newHashSet();
-            final boolean[] states = nodesState.allocated;
+            assert !nodesState.containsKey(null);
             final Object[] keys = nodesState.keys;
             final long[] values = nodesState.values;
-            for (int i = 0; i < states.length; i++) {
-                if (!states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] == null) {
                     continue;
                 }
@@ -380,13 +381,13 @@ public class GatewayAllocator extends AbstractComponent {
      * A shard on shared storage will return at least shard state 0 for all
      * nodes, indicating that the shard can be allocated to any node.
      */
-    private ObjectLongOpenHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard, IndexMetaData indexMetaData) {
-        ObjectLongOpenHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
-        ObjectOpenHashSet<String> nodeIds;
+    private ObjectLongHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard, IndexMetaData indexMetaData) {
+        ObjectLongHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
+        ObjectHashSet<String> nodeIds;
         if (shardStates == null) {
-            shardStates = new ObjectLongOpenHashMap<>();
+            shardStates = new ObjectLongHashMap<>();
             cachedShardsState.put(shard.shardId(), shardStates);
-            nodeIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
+            nodeIds = new ObjectHashSet<>(nodes.dataNodes().keys());
         } else {
             // clean nodes that have failed
             shardStates.keys().removeAll(new ObjectPredicate<DiscoveryNode>() {
@@ -395,7 +396,7 @@ public class GatewayAllocator extends AbstractComponent {
                     return !nodes.nodeExists(node.id());
                 }
             });
-            nodeIds = ObjectOpenHashSet.newInstance();
+            nodeIds = new ObjectHashSet<>();
             // we have stored cached from before, see if the nodes changed, if they have, go fetch again
             for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
                 DiscoveryNode node = cursor.value;
@@ -442,13 +443,13 @@ public class GatewayAllocator extends AbstractComponent {
     private Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> buildShardStores(DiscoveryNodes nodes, MutableShardRouting shard) {
         Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = cachedStores.get(shard.shardId());
-        ObjectOpenHashSet<String> nodesIds;
+        ObjectHashSet<String> nodesIds;
         if (shardStores == null) {
             shardStores = Maps.newHashMap();
             cachedStores.put(shard.shardId(), shardStores);
-            nodesIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
+            nodesIds = new ObjectHashSet<>(nodes.dataNodes().keys());
         } else {
-            nodesIds = ObjectOpenHashSet.newInstance();
+            nodesIds = new ObjectHashSet<>();
             // clean nodes that have failed
             for (Iterator<DiscoveryNode> it = shardStores.keySet().iterator(); it.hasNext(); ) {
                 DiscoveryNode node = it.next();

View File

@@ -183,7 +183,7 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
         Path file = siteFile.resolve(sitePath);

         // return not found instead of forbidden to prevent malicious requests to find out if files exist or dont exist
-        if (!Files.exists(file) || Files.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath())) {
+        if (!Files.exists(file) || Files.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath().normalize())) {
             channel.sendResponse(new BytesRestResponse(NOT_FOUND));
             return;
         }
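The one-line fix above normalizes both sides of the `startsWith` check. `Path.startsWith` compares name elements literally, so a normalized child path can never match a site root that still carries `.` or `..` segments; a small sketch with hypothetical paths:

[source,java]
--------------------------------------------------
import java.nio.file.Path;
import java.nio.file.Paths;

Path siteFile = Paths.get("/opt/es/plugins/x/./_site"); // hypothetical, unnormalized root
Path file = siteFile.resolve("index.html");

Path normalized = file.toAbsolutePath().normalize();    // /opt/es/plugins/x/_site/index.html
System.out.println(normalized.startsWith(siteFile.toAbsolutePath()));             // false: "." survives on the right
System.out.println(normalized.startsWith(siteFile.toAbsolutePath().normalize())); // true
--------------------------------------------------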

View File

@@ -63,7 +63,9 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
         if (analyzer != null) {
             return analyzer;
         }
-        return defaultAnalyzer;
+        // Don't be lenient here and return the default analyzer
+        // Fields need to be explicitly added
+        throw new IllegalArgumentException("Field [" + name + "] has no associated analyzer");
     }

     /**
@@ -72,9 +74,11 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
     public FieldNameAnalyzer copyAndAddAll(Collection<? extends Map.Entry<String, Analyzer>> mappers) {
         CopyOnWriteHashMap<String, Analyzer> analyzers = this.analyzers;
         for (Map.Entry<String, Analyzer> entry : mappers) {
-            if (entry.getValue() != null) {
-                analyzers = analyzers.copyAndPut(entry.getKey(), entry.getValue());
+            Analyzer analyzer = entry.getValue();
+            if (analyzer == null) {
+                analyzer = defaultAnalyzer;
             }
+            analyzers = analyzers.copyAndPut(entry.getKey(), analyzer);
         }
         return new FieldNameAnalyzer(analyzers, defaultAnalyzer);
     }

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 import com.google.common.collect.Maps;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.joda.time.format.DateTimeFormatter;
@@ -32,12 +32,12 @@ import java.util.Map;
  */
 public class NumericDateAnalyzer extends NumericAnalyzer<NumericDateTokenizer> {

-    private static final Map<String, IntObjectOpenHashMap<NamedAnalyzer>> globalAnalyzers = Maps.newHashMap();
+    private static final Map<String, IntObjectHashMap<NamedAnalyzer>> globalAnalyzers = Maps.newHashMap();

     public static synchronized NamedAnalyzer buildNamedAnalyzer(FormatDateTimeFormatter formatter, int precisionStep) {
-        IntObjectOpenHashMap<NamedAnalyzer> precisionMap = globalAnalyzers.get(formatter.format());
+        IntObjectHashMap<NamedAnalyzer> precisionMap = globalAnalyzers.get(formatter.format());
         if (precisionMap == null) {
-            precisionMap = new IntObjectOpenHashMap<>();
+            precisionMap = new IntObjectHashMap<>();
             globalAnalyzers.put(formatter.format(), precisionMap);
         }
         NamedAnalyzer namedAnalyzer = precisionMap.get(precisionStep);

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericDoubleAnalyzer extends NumericAnalyzer<NumericDoubleTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_double/max", AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_double/" + i, AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(i)));

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericFloatAnalyzer extends NumericAnalyzer<NumericFloatTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_float/max", AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_float/" + i, AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(i)));

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericIntegerAnalyzer extends NumericAnalyzer<NumericIntegerTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_int/max", AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_int/" + i, AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(i)));

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericLongAnalyzer extends NumericAnalyzer<NumericLongTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_long/max", AnalyzerScope.GLOBAL, new NumericLongAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_long/" + i, AnalyzerScope.GLOBAL, new NumericLongAnalyzer(i)));
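The four numeric analyzers above share the same caching idiom: one pre-built NamedAnalyzer per common precision step, keyed in an IntObjectHashMap. A reduced sketch of that lookup pattern (string values stand in for the analyzers, and the fallback name is hypothetical):

[source,java]
--------------------------------------------------
import com.carrotsearch.hppc.IntObjectHashMap;

IntObjectHashMap<String> builtIn = new IntObjectHashMap<>();
for (int i = 0; i <= 64; i += 4) {
    builtIn.put(i, "_long/" + i);
}
// Common steps hit the cache; uncommon ones fall back to a fresh instance.
String analyzer = builtIn.getOrDefault(8, "_long/custom");
--------------------------------------------------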

View File

@@ -1,89 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.id;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
/**
* @deprecated Id cache has been removed in favor for {@link org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData}
* this class now reports field data memory usage for _parent field.
*/
@Deprecated
public class IdCacheStats implements Streamable, ToXContent {
long memorySize;
public IdCacheStats() {
}
public IdCacheStats(long memorySize) {
this.memorySize = memorySize;
}
public void add(IdCacheStats stats) {
this.memorySize += stats.memorySize;
}
public long getMemorySizeInBytes() {
return this.memorySize;
}
public ByteSizeValue getMemorySize() {
return new ByteSizeValue(memorySize);
}
public static IdCacheStats readIdCacheStats(StreamInput in) throws IOException {
IdCacheStats stats = new IdCacheStats();
stats.readFrom(in);
return stats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
memorySize = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(memorySize);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.ID_CACHE);
builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, memorySize);
builder.endObject();
return builder;
}
static final class Fields {
static final XContentBuilderString ID_CACHE = new XContentBuilderString("id_cache");
static final XContentBuilderString MEMORY_SIZE = new XContentBuilderString("memory_size");
static final XContentBuilderString MEMORY_SIZE_IN_BYTES = new XContentBuilderString("memory_size_in_bytes");
}
}

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.fielddata;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -38,13 +38,13 @@ public class FieldDataStats implements Streamable, ToXContent {
     long memorySize;
     long evictions;
     @Nullable
-    ObjectLongOpenHashMap<String> fields;
+    ObjectLongHashMap<String> fields;

     public FieldDataStats() {
     }

-    public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongOpenHashMap<String> fields) {
+    public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongHashMap<String> fields) {
         this.memorySize = memorySize;
         this.evictions = evictions;
         this.fields = fields;
@@ -54,17 +54,20 @@
         this.memorySize += stats.memorySize;
         this.evictions += stats.evictions;
         if (stats.fields != null) {
-            if (fields == null) fields = new ObjectLongOpenHashMap<>();
-            final boolean[] states = stats.fields.allocated;
-            final Object[] keys = stats.fields.keys;
-            final long[] values = stats.fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
-                    fields.addTo((String) keys[i], values[i]);
+            if (fields == null) {
+                fields = stats.fields.clone();
+            } else {
+                assert !stats.fields.containsKey(null);
+                final Object[] keys = stats.fields.keys;
+                final long[] values = stats.fields.values;
+                for (int i = 0; i < keys.length; i++) {
+                    if (keys[i] != null) {
+                        fields.addTo((String) keys[i], values[i]);
+                    }
                 }
             }
         }
     }

     public long getMemorySizeInBytes() {
         return this.memorySize;
@@ -79,7 +82,7 @@
     }

     @Nullable
-    public ObjectLongOpenHashMap<String> getFields() {
+    public ObjectLongHashMap<String> getFields() {
         return fields;
     }
@@ -95,7 +98,7 @@
         evictions = in.readVLong();
         if (in.readBoolean()) {
             int size = in.readVInt();
-            fields = new ObjectLongOpenHashMap<>(size);
+            fields = new ObjectLongHashMap<>(size);
             for (int i = 0; i < size; i++) {
                 fields.put(in.readString(), in.readVLong());
             }
@@ -111,11 +114,11 @@
         } else {
             out.writeBoolean(true);
             out.writeVInt(fields.size());
-            final boolean[] states = fields.allocated;
+            assert !fields.containsKey(null);
             final Object[] keys = fields.keys;
             final long[] values = fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     out.writeString((String) keys[i]);
                     out.writeVLong(values[i]);
                 }
@@ -130,11 +133,11 @@
         builder.field(Fields.EVICTIONS, getEvictions());
         if (fields != null) {
             builder.startObject(Fields.FIELDS);
-            final boolean[] states = fields.allocated;
+            assert !fields.containsKey(null);
             final Object[] keys = fields.keys;
             final long[] values = fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE);
                     builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, values[i]);
                     builder.endObject();

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.fielddata;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.metrics.CounterMetric;
@@ -50,23 +50,16 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index
     }

     public FieldDataStats stats(String... fields) {
-        ObjectLongOpenHashMap<String> fieldTotals = null;
+        ObjectLongHashMap<String> fieldTotals = null;
         if (fields != null && fields.length > 0) {
-            fieldTotals = new ObjectLongOpenHashMap<>();
+            fieldTotals = new ObjectLongHashMap<>();
             for (Map.Entry<String, CounterMetric> entry : perFieldTotals.entrySet()) {
                 if (Regex.simpleMatch(fields, entry.getKey())) {
                     fieldTotals.put(entry.getKey(), entry.getValue().count());
                 }
             }
         }
-
-        // Because we report _parent field used memory separately via id cache, we need to subtract it from the
-        // field data total memory used. This code should be removed for >= 2.0
-        long memorySize = totalMetric.count();
-        if (perFieldTotals.containsKey(ParentFieldMapper.NAME)) {
-            memorySize -= perFieldTotals.get(ParentFieldMapper.NAME).count();
-        }
-        return new FieldDataStats(memorySize, evictionsMetric.count(), fieldTotals);
+        return new FieldDataStats(totalMetric.count(), evictionsMetric.count(), fieldTotals);
     }

     @Override

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.fielddata.plain;

-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.collect.ImmutableSortedSet;
@@ -132,7 +132,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
         );
         ParentChildEstimator estimator = new ParentChildEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA), termsEnum);
         TermsEnum estimatedTermsEnum = estimator.beforeLoad(null);
-        ObjectObjectOpenHashMap<String, TypeBuilder> typeBuilders = ObjectObjectOpenHashMap.newInstance();
+        ObjectObjectHashMap<String, TypeBuilder> typeBuilders = new ObjectObjectHashMap<>();
         try {
             try {
                 PostingsEnum docsEnum = null;

View File

@@ -19,8 +19,6 @@
 package org.elasticsearch.index.mapper;

-import java.util.Objects;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
 import org.elasticsearch.index.analysis.FieldNameAnalyzer;

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.mapper;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -79,7 +79,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 public class MapperService extends AbstractIndexComponent {

     public static final String DEFAULT_MAPPING = "_default_";
-    private static ObjectOpenHashSet<String> META_FIELDS = ObjectOpenHashSet.from(
+    private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(
             "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
             "_size", "_timestamp", "_ttl"
     );

View File

@@ -20,7 +20,7 @@
 package org.elasticsearch.index.mapper;

 import com.carrotsearch.hppc.ObjectObjectMap;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.google.common.collect.Lists;
 import org.apache.lucene.document.Field;
@@ -106,7 +106,7 @@ public abstract class ParseContext {
         /** Add fields so that they can later be fetched using {@link #getByKey(Object)}. */
         public void addWithKey(Object key, IndexableField field) {
             if (keyedFields == null) {
-                keyedFields = new ObjectObjectOpenHashMap<>();
+                keyedFields = new ObjectObjectHashMap<>();
             } else if (keyedFields.containsKey(key)) {
                 throw new IllegalStateException("Only one field can be stored per key");
             }

View File

@@ -19,6 +19,7 @@
 package org.elasticsearch.index.mapper.core;

+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.base.Function;

View File

@@ -369,6 +369,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
         payload = payload == null ? EMPTY : payload;
         if (surfaceForm == null) { // no surface form use the input
             for (String input : inputs) {
+                if (input.length() == 0) {
+                    continue;
+                }
                 BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
                         input), weight, payload);
                 context.doc().add(getCompletionField(ctx, input, suggestPayload));
@@ -377,6 +380,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
             BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
                     surfaceForm), weight, payload);
             for (String input : inputs) {
+                if (input.length() == 0) {
+                    continue;
+                }
                 context.doc().add(getCompletionField(ctx, input, suggestPayload));
             }
         }

View File

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.mapper.geo;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Objects;
 import com.google.common.collect.Iterators;
@@ -748,11 +748,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
             TYPE.freeze();
         }

-        private final ObjectOpenHashSet<GeoPoint> points;
+        private final ObjectHashSet<GeoPoint> points;

         public CustomGeoPointDocValuesField(String name, double lat, double lon) {
             super(name);
-            points = new ObjectOpenHashSet<>(2);
+            points = new ObjectHashSet<>(2);
             points.add(new GeoPoint(lat, lon));
         }

View File

@@ -54,6 +54,7 @@ import org.elasticsearch.index.mapper.RootMapper;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;

 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -150,7 +151,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = Strings.toUnderscoreCase(entry.getKey());
                 Object fieldNode = entry.getValue();
-                if (fieldName.equals("enabled") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
+                if (fieldName.equals("enabled")) {
                     builder.enabled(nodeBooleanValue(fieldNode));
                     iterator.remove();
                 } else if (fieldName.equals("compress") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
@@ -172,7 +173,7 @@
                 } else if ("format".equals(fieldName)) {
                     builder.format(nodeStringValue(fieldNode, null));
                     iterator.remove();
-                } else if (fieldName.equals("includes") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
+                } else if (fieldName.equals("includes")) {
                     List<Object> values = (List<Object>) fieldNode;
                     String[] includes = new String[values.size()];
                     for (int i = 0; i < includes.length; i++) {
@@ -180,7 +181,7 @@
                     }
                     builder.includes(includes);
                     iterator.remove();
-                } else if (fieldName.equals("excludes") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
+                } else if (fieldName.equals("excludes")) {
                     List<Object> values = (List<Object>) fieldNode;
                     String[] excludes = new String[values.size()];
                     for (int i = 0; i < excludes.length; i++) {
@@ -197,11 +198,14 @@
     private final boolean enabled;

+    /** indicates whether the source will always exist and be complete, for use by features like the update API */
+    private final boolean complete;
+
     private Boolean compress;
     private long compressThreshold;

-    private String[] includes;
-    private String[] excludes;
+    private final String[] includes;
+    private final String[] excludes;

     private String format;
@@ -222,10 +226,11 @@
         this.excludes = excludes;
         this.format = format;
         this.formatContentType = format == null ? null : XContentType.fromRestContentType(format);
+        this.complete = enabled && includes == null && excludes == null;
     }

     public boolean enabled() {
-        return this.enabled;
+        return enabled;
     }

     public String[] excludes() {
@@ -237,6 +242,10 @@
         return this.includes != null ? this.includes : Strings.EMPTY_ARRAY;
     }

+    public boolean isComplete() {
+        return complete;
+    }
+
     @Override
     public FieldType defaultFieldType() {
         return Defaults.FIELD_TYPE;
@@ -420,19 +429,23 @@
     @Override
     public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
         SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
-        if (!mergeResult.simulate()) {
+        if (mergeResult.simulate()) {
+            if (this.enabled != sourceMergeWith.enabled) {
+                mergeResult.addConflict("Cannot update enabled setting for [_source]");
+            }
+            if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) {
+                mergeResult.addConflict("Cannot update includes setting for [_source]");
+            }
+            if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
+                mergeResult.addConflict("Cannot update excludes setting for [_source]");
+            }
+        } else {
             if (sourceMergeWith.compress != null) {
                 this.compress = sourceMergeWith.compress;
             }
             if (sourceMergeWith.compressThreshold != -1) {
                 this.compressThreshold = sourceMergeWith.compressThreshold;
             }
-            if (sourceMergeWith.includes != null) {
-                this.includes = sourceMergeWith.includes;
-            }
-            if (sourceMergeWith.excludes != null) {
-                this.excludes = sourceMergeWith.excludes;
-            }
         }
     }
 }

View File

@@ -30,7 +30,7 @@ import java.util.ArrayList;
  * @deprecated Use {@link BoolQueryBuilder} instead
  */
 @Deprecated
-public class AndQueryBuilder extends BaseQueryBuilder {
+public class AndQueryBuilder extends QueryBuilder {

     private ArrayList<QueryBuilder> filters = Lists.newArrayList();
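As the @deprecated note says, and-style composition now goes through BoolQueryBuilder; an equivalent rewrite sketch using the standard QueryBuilders factory (field names and values are illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

// Instead of AndQueryBuilder, AND the clauses with bool/must:
BoolQueryBuilder query = QueryBuilders.boolQuery()
        .must(QueryBuilders.termQuery("user", "kimchy"))
        .must(QueryBuilders.termQuery("state", "active"));
--------------------------------------------------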

Some files were not shown because too many files have changed in this diff.