Merge branch 'master' into module_culling3

commit c0444ccbc6
@@ -42,7 +42,7 @@ h3. Installation

* "Download":https://www.elastic.co/downloads/elasticsearch and unzip the Elasticsearch official distribution.
* Run @bin/elasticsearch@ on unix, or @bin\elasticsearch.bat@ on windows.
-* Run @curl -X GET http://127.0.0.1:9200/@.
+* Run @curl -X GET http://localhost:9200/@.
* Start more servers ...

h3. Indexing
@@ -50,16 +50,16 @@ h3. Indexing

Let's try and index some twitter like information. First, let's create a twitter user, and add some tweets (the @twitter@ index will be created automatically):

<pre>
-curl -XPUT 'http://127.0.0.1:9200/twitter/user/kimchy' -d '{ "name" : "Shay Banon" }'
+curl -XPUT 'http://localhost:9200/twitter/user/kimchy' -d '{ "name" : "Shay Banon" }'

-curl -XPUT 'http://127.0.0.1:9200/twitter/tweet/1' -d '
+curl -XPUT 'http://localhost:9200/twitter/tweet/1' -d '
{
    "user": "kimchy",
    "postDate": "2009-11-15T13:12:00",
    "message": "Trying out Elasticsearch, so far so good?"
}'

-curl -XPUT 'http://127.0.0.1:9200/twitter/tweet/2' -d '
+curl -XPUT 'http://localhost:9200/twitter/tweet/2' -d '
{
    "user": "kimchy",
    "postDate": "2009-11-15T14:12:12",
@@ -70,9 +70,9 @@ curl -XPUT 'http://127.0.0.1:9200/twitter/tweet/2' -d '
Now, let's see if the information was added by GETting it:

<pre>
-curl -XGET 'http://127.0.0.1:9200/twitter/user/kimchy?pretty=true'
-curl -XGET 'http://127.0.0.1:9200/twitter/tweet/1?pretty=true'
-curl -XGET 'http://127.0.0.1:9200/twitter/tweet/2?pretty=true'
+curl -XGET 'http://localhost:9200/twitter/user/kimchy?pretty=true'
+curl -XGET 'http://localhost:9200/twitter/tweet/1?pretty=true'
+curl -XGET 'http://localhost:9200/twitter/tweet/2?pretty=true'
</pre>

h3. Searching
@@ -81,13 +81,13 @@ Mmm search..., shouldn't it be elastic?
Let's find all the tweets that @kimchy@ posted:

<pre>
-curl -XGET 'http://127.0.0.1:9200/twitter/tweet/_search?q=user:kimchy&pretty=true'
+curl -XGET 'http://localhost:9200/twitter/tweet/_search?q=user:kimchy&pretty=true'
</pre>

We can also use the JSON query language Elasticsearch provides instead of a query string:

<pre>
-curl -XGET 'http://127.0.0.1:9200/twitter/tweet/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/twitter/tweet/_search?pretty=true' -d '
{
    "query" : {
        "match" : { "user": "kimchy" }
@@ -98,7 +98,7 @@ curl -XGET 'http://127.0.0.1:9200/twitter/tweet/_search?pretty=true' -d '
Just for kicks, let's get all the documents stored (we should see the user as well):

<pre>
-curl -XGET 'http://127.0.0.1:9200/twitter/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d '
{
    "query" : {
        "matchAll" : {}
@@ -109,7 +109,7 @@ curl -XGET 'http://127.0.0.1:9200/twitter/_search?pretty=true' -d '
We can also do range search (the @postDate@ was automatically identified as date)

<pre>
-curl -XGET 'http://127.0.0.1:9200/twitter/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d '
{
    "query" : {
        "range" : {
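
The hunk above cuts off at the opening of the range clause, so the query body itself is not part of this diff. For context only, a sketch of the kind of range query that section of the README demonstrates; the field comes from the indexed tweets above, but the bounds here are illustrative:

<pre>
curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d '
{
    "query" : {
        "range" : {
            "postDate" : { "from" : "2009-11-15T13:00:00", "to" : "2009-11-15T14:00:00" }
        }
    }
}'
</pre>
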
@@ -130,16 +130,16 @@ Elasticsearch supports multiple indices, as well as multiple types per index. In
Another way to define our simple twitter system is to have a different index per user (note, though that each index has an overhead). Here is the indexing curl's in this case:

<pre>
-curl -XPUT 'http://127.0.0.1:9200/kimchy/info/1' -d '{ "name" : "Shay Banon" }'
+curl -XPUT 'http://localhost:9200/kimchy/info/1' -d '{ "name" : "Shay Banon" }'

-curl -XPUT 'http://127.0.0.1:9200/kimchy/tweet/1' -d '
+curl -XPUT 'http://localhost:9200/kimchy/tweet/1' -d '
{
    "user": "kimchy",
    "postDate": "2009-11-15T13:12:00",
    "message": "Trying out Elasticsearch, so far so good?"
}'

-curl -XPUT 'http://127.0.0.1:9200/kimchy/tweet/2' -d '
+curl -XPUT 'http://localhost:9200/kimchy/tweet/2' -d '
{
    "user": "kimchy",
    "postDate": "2009-11-15T14:12:12",
@@ -152,7 +152,7 @@ The above will index information into the @kimchy@ index, with two types, @info@
Complete control on the index level is allowed. As an example, in the above case, we would want to change from the default 5 shards with 1 replica per index, to only 1 shard with 1 replica per index (== per twitter user). Here is how this can be done (the configuration can be in yaml as well):

<pre>
-curl -XPUT http://127.0.0.1:9200/another_user/ -d '
+curl -XPUT http://localhost:9200/another_user/ -d '
{
    "index" : {
        "numberOfShards" : 1,
@@ -165,7 +165,7 @@ Search (and similar operations) are multi index aware. This means that we can ea
index (twitter user), for example:

<pre>
-curl -XGET 'http://127.0.0.1:9200/kimchy,another_user/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -d '
{
    "query" : {
        "matchAll" : {}
@@ -176,7 +176,7 @@ curl -XGET 'http://127.0.0.1:9200/kimchy,another_user/_search?pretty=true' -d '
Or on all the indices:

<pre>
-curl -XGET 'http://127.0.0.1:9200/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/_search?pretty=true' -d '
{
    "query" : {
        "matchAll" : {}

@@ -321,11 +321,13 @@ Vagrant. You can get started by following there five easy steps:

. (Optional) Install vagrant-cachier to squeeze a bit more performance out of
the process:

--------------------------------------
vagrant plugin install vagrant-cachier
--------------------------------------

. Validate your installed dependencies:

-------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant validate
-------------------------------------
@@ -334,11 +336,14 @@ mvn -Dtests.vagrant -pl qa/vagrant validate
from Vagrant when you run it inside mvn its probably best if you run this one
time to setup all the VMs one at a time. Run this to download and setup the VMs
we use for testing by default:

--------------------------------------------------------
vagrant up --provision trusty && vagrant halt trusty
vagrant up --provision centos-7 && vagrant halt centos-7
--------------------------------------------------------

or run this to download and setup all the VMs:

-------------------------------------------------------------------------------
vagrant halt
for box in $(vagrant status | grep 'poweroff\|not created' | cut -f1 -d' '); do
@@ -349,24 +354,32 @@ done

. Smoke test the maven/ant dance that we use to get vagrant involved in
integration testing is working:

---------------------------------------------
mvn -Dtests.vagrant -Psmoke-vms -pl qa/vagrant verify
---------------------------------------------

or this to validate all the VMs:

-------------------------------------------------
mvn -Dtests.vagrant=all -Psmoke-vms -pl qa/vagrant verify
-------------------------------------------------

That will start up the VMs and then immediate quit.

. Finally run the tests. The fastest way to get this started is to run:

-----------------------------------
mvn clean install -DskipTests
mvn -Dtests.vagrant -pl qa/vagrant verify
-----------------------------------

You could just run:

--------------------
mvn -Dtests.vagrant verify
--------------------

but that will run all the tests. Which is probably a good thing, but not always
what you want.

@@ -379,39 +392,51 @@ packaging and SyvVinit and systemd.

You can control the boxes that are used for testing like so. Run just
fedora-22 with:

--------------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant verify -DboxesToTest=fedora-22
--------------------------------------------

or run wheezy and trusty:

------------------------------------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant verify -DboxesToTest='wheezy, trusty'
------------------------------------------------------------------

or run all the boxes:

---------------------------------------
mvn -Dtests.vagrant=all -pl qa/vagrant verify
---------------------------------------

Its important to know that if you ctrl-c any of these `mvn` runs that you'll
probably leave a VM up. You can terminate it by running:

------------
vagrant halt
------------

This is just regular vagrant so you can run normal multi box vagrant commands
to test things manually. Just run:

---------------------------------------
vagrant up trusty && vagrant ssh trusty
---------------------------------------

to get an Ubuntu or

-------------------------------------------
vagrant up centos-7 && vagrant ssh centos-7
-------------------------------------------

to get a CentOS. Once you are done with them you should halt them:

-------------------
vagrant halt trusty
-------------------

These are the linux flavors the Vagrantfile currently supports:

* precise aka Ubuntu 12.04
* trusty aka Ubuntu 14.04
* vivid aka Ubuntun 15.04
@@ -424,23 +449,29 @@ These are the linux flavors the Vagrantfile currently supports:

We're missing the following from the support matrix because there aren't high
quality boxes available in vagrant atlas:

* sles-11
* sles-12
* opensuse-13
* oel-6

We're missing the follow because our tests are very linux/bash centric:

* Windows Server 2012

Its important to think of VMs like cattle: if they become lame you just shoot
them and let vagrant reprovision them. Say you've hosed your precise VM:

----------------------------------------------------
vagrant ssh precise -c 'sudo rm -rf /bin'; echo oops
----------------------------------------------------

All you've got to do to get another one is

----------------------------------------------
vagrant destroy -f trusty && vagrant up trusty
----------------------------------------------

The whole process takes a minute and a half on a modern laptop, two and a half
without vagrant-cachier.

@@ -450,14 +481,17 @@ around it:
https://github.com/mitchellh/vagrant/issues/4479

Some vagrant commands will work on all VMs at once:

------------------
vagrant halt
vagrant destroy -f
------------------


----------
vagrant up
----------

would normally start all the VMs but we've prevented that because that'd
consume a ton of ram.

@@ -466,10 +500,13 @@ consume a ton of ram.
In general its best to stick to testing in vagrant because the bats scripts are
destructive. When working with a single package its generally faster to run its
tests in a tighter loop than maven provides. In one window:

--------------------------------
mvn -pl distribution/rpm package
--------------------------------

and in another window:

----------------------------------------------------
vagrant up centos-7 && vagrant ssh centos-7
cd $RPM
@@ -477,6 +514,7 @@ sudo bats $BATS/*rpm*.bats
----------------------------------------------------

If you wanted to retest all the release artifacts on a single VM you could:

-------------------------------------------------
# Build all the distributions fresh but skip recompiling elasticsearch:
mvn -amd -pl distribution install -DskipTests

@@ -22,32 +22,32 @@
# under the License.

Vagrant.configure(2) do |config|
-  config.vm.define "precise", autostart: false do |config|
+  config.vm.define "precise" do |config|
    config.vm.box = "ubuntu/precise64"
    ubuntu_common config
  end
-  config.vm.define "trusty", autostart: false do |config|
+  config.vm.define "trusty" do |config|
    config.vm.box = "ubuntu/trusty64"
    ubuntu_common config
  end
-  config.vm.define "vivid", autostart: false do |config|
+  config.vm.define "vivid" do |config|
    config.vm.box = "ubuntu/vivid64"
    ubuntu_common config
  end
-  config.vm.define "wheezy", autostart: false do |config|
+  config.vm.define "wheezy" do |config|
    config.vm.box = "debian/wheezy64"
    deb_common(config)
  end
-  config.vm.define "jessie", autostart: false do |config|
+  config.vm.define "jessie" do |config|
    config.vm.box = "debian/jessie64"
    deb_common(config)
  end
-  config.vm.define "centos-6", autostart: false do |config|
+  config.vm.define "centos-6" do |config|
    # TODO switch from chef to boxcutter to provide?
    config.vm.box = "chef/centos-6.6"
    rpm_common(config)
  end
-  config.vm.define "centos-7", autostart: false do |config|
+  config.vm.define "centos-7" do |config|
    # There is a centos/7 box but it doesn't have rsync or virtualbox guest
    # stuff on there so its slow to use. So chef it is....
    # TODO switch from chef to boxcutter to provide?
@@ -59,11 +59,11 @@ Vagrant.configure(2) do |config|
  # config.vm.box = "boxcutter/oel66"
  # rpm_common(config)
  # end
-  config.vm.define "oel-7", autostart: false do |config|
+  config.vm.define "oel-7" do |config|
    config.vm.box = "boxcutter/oel70"
    rpm_common(config)
  end
-  config.vm.define "fedora-22", autostart: false do |config|
+  config.vm.define "fedora-22" do |config|
    # Fedora hosts their own 'cloud' images that aren't in Vagrant's Atlas but
    # and are missing required stuff like rsync. It'd be nice if we could use
    # them but they much slower to get up and running then the boxcutter image.
@@ -75,6 +75,33 @@ Vagrant.configure(2) do |config|
  # the elasticsearch project called vagrant....
  config.vm.synced_folder ".", "/vagrant", disabled: true
  config.vm.synced_folder "", "/elasticsearch"
+  if Vagrant.has_plugin?("vagrant-cachier")
+    config.cache.scope = :box
+  end
+  config.vm.defined_vms.each do |name, config|
+    config.options[:autostart] = false
+    set_prompt = lambda do |config|
+      # Sets up a consistent prompt for all users. Or tries to. The VM might
+      # contain overrides for root and vagrant but this attempts to work around
+      # them by re-source-ing the standard prompt file.
+      config.vm.provision "prompt", type: "shell", inline: <<-SHELL
+        cat \<\<PROMPT > /etc/profile.d/elasticsearch_prompt.sh
+export PS1='#{name}:\\w$ '
+PROMPT
+        grep 'source /etc/profile.d/elasticsearch_prompt.sh' ~/.bashrc |
+          cat \<\<SOURCE_PROMPT >> ~/.bashrc
+# Replace the standard prompt with a consistent one
+source /etc/profile.d/elasticsearch_prompt.sh
+SOURCE_PROMPT
+        grep 'source /etc/profile.d/elasticsearch_prompt.sh' ~vagrant/.bashrc |
+          cat \<\<SOURCE_PROMPT >> ~vagrant/.bashrc
+# Replace the standard prompt with a consistent one
+source /etc/profile.d/elasticsearch_prompt.sh
+SOURCE_PROMPT
+      SHELL
+    end
+    config.config_procs.push ['2', set_prompt]
+  end
end

def ubuntu_common(config)
@@ -90,24 +117,17 @@ end
def deb_common(config)
  provision(config, "apt-get update", "/var/cache/apt/archives/last_update",
            "apt-get install -y", "openjdk-7-jdk")
-  if Vagrant.has_plugin?("vagrant-cachier")
-    config.cache.scope = :box
-  end
end

def rpm_common(config)
  provision(config, "yum check-update", "/var/cache/yum/last_update",
            "yum install -y", "java-1.7.0-openjdk-devel")
-  if Vagrant.has_plugin?("vagrant-cachier")
-    config.cache.scope = :box
-  end
end

def dnf_common(config)
  provision(config, "dnf check-update", "/var/cache/dnf/last_update",
            "dnf install -y", "java-1.8.0-openjdk-devel")
  if Vagrant.has_plugin?("vagrant-cachier")
-    config.cache.scope = :box
    # Autodetect doesn't work....
    config.cache.auto_detect = false
    config.cache.enable :generic, { :cache_dir => "/var/cache/dnf" }
@@ -116,7 +136,7 @@ end


def provision(config, update_command, update_tracking_file, install_command, java_package)
-  config.vm.provision "elasticsearch bats dependencies", type: "shell", inline: <<-SHELL
+  config.vm.provision "bats dependencies", type: "shell", inline: <<-SHELL
    set -e
    installed() {
      command -v $1 2>&1 >/dev/null
@@ -150,7 +170,7 @@ export TAR=/elasticsearch/distribution/tar/target/releases
export RPM=/elasticsearch/distribution/rpm/target/releases
export DEB=/elasticsearch/distribution/deb/target/releases
export TESTROOT=/elasticsearch/qa/vagrant/target/testroot
-export BATS=/elasticsearch/qa/vagrant/src/test/resources/packaging/scripts/
+export BATS=/elasticsearch/qa/vagrant/src/test/resources/packaging/scripts
VARS
SHELL
end

@@ -5,15 +5,14 @@
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.elasticsearch</groupId>
-        <artifactId>elasticsearch-parent</artifactId>
+        <artifactId>parent</artifactId>
        <version>2.1.0-SNAPSHOT</version>
    </parent>

    <groupId>org.elasticsearch</groupId>
    <artifactId>elasticsearch</artifactId>

-    <packaging>jar</packaging>
-    <name>Elasticsearch Core</name>
+    <name>Elasticsearch: Core</name>
    <description>Elasticsearch - Open Source, Distributed, RESTful Search Engine</description>

    <dependencies>

@@ -35,7 +35,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
    private boolean process = true;
    private boolean jvm = true;
    private boolean threadPool = true;
-    private boolean network = true;
    private boolean transport = true;
    private boolean http = true;
    private boolean plugins = true;
@@ -60,7 +59,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
        process = false;
        jvm = false;
        threadPool = false;
-        network = false;
        transport = false;
        http = false;
        plugins = false;
@@ -76,7 +74,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
        process = true;
        jvm = true;
        threadPool = true;
-        network = true;
        transport = true;
        http = true;
        plugins = true;
@@ -158,21 +155,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
        return this;
    }

-    /**
-     * Should the node Network be returned.
-     */
-    public boolean network() {
-        return this.network;
-    }
-
-    /**
-     * Should the node Network be returned.
-     */
-    public NodesInfoRequest network(boolean network) {
-        this.network = network;
-        return this;
-    }
-
    /**
     * Should the node Transport be returned.
     */
@@ -228,7 +210,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
        process = in.readBoolean();
        jvm = in.readBoolean();
        threadPool = in.readBoolean();
-        network = in.readBoolean();
        transport = in.readBoolean();
        http = in.readBoolean();
        plugins = in.readBoolean();
@@ -242,7 +223,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
        out.writeBoolean(process);
        out.writeBoolean(jvm);
        out.writeBoolean(threadPool);
-        out.writeBoolean(network);
        out.writeBoolean(transport);
        out.writeBoolean(http);
        out.writeBoolean(plugins);

@@ -87,14 +87,6 @@ public class NodesInfoRequestBuilder extends NodesOperationRequestBuilder<NodesI
        return this;
    }

-    /**
-     * Should the node Network info be returned.
-     */
-    public NodesInfoRequestBuilder setNetwork(boolean network) {
-        request.network(network);
-        return this;
-    }
-
    /**
     * Should the node Transport info be returned.
     */

@@ -80,7 +80,7 @@ public class TransportNodesInfoAction extends TransportNodesAction<NodesInfoRequ
    protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) {
        NodesInfoRequest request = nodeRequest.request;
        return nodeService.info(request.settings(), request.os(), request.process(), request.jvm(), request.threadPool(),
-                request.network(), request.transport(), request.http(), request.plugins());
+                request.transport(), request.http(), request.plugins());
    }

    @Override

@@ -36,7 +36,6 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
    private boolean process;
    private boolean jvm;
    private boolean threadPool;
-    private boolean network;
    private boolean fs;
    private boolean transport;
    private boolean http;
@@ -63,7 +62,6 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
        this.process = true;
        this.jvm = true;
        this.threadPool = true;
-        this.network = true;
        this.fs = true;
        this.transport = true;
        this.http = true;
@@ -81,7 +79,6 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
        this.process = false;
        this.jvm = false;
        this.threadPool = false;
-        this.network = false;
        this.fs = false;
        this.transport = false;
        this.http = false;
@@ -171,21 +168,6 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
        return this;
    }

-    /**
-     * Should the node Network be returned.
-     */
-    public boolean network() {
-        return this.network;
-    }
-
-    /**
-     * Should the node Network be returned.
-     */
-    public NodesStatsRequest network(boolean network) {
-        this.network = network;
-        return this;
-    }
-
    /**
     * Should the node file system stats be returned.
     */
@@ -260,7 +242,6 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
        process = in.readBoolean();
        jvm = in.readBoolean();
        threadPool = in.readBoolean();
-        network = in.readBoolean();
        fs = in.readBoolean();
        transport = in.readBoolean();
        http = in.readBoolean();
@@ -276,7 +257,6 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
        out.writeBoolean(process);
        out.writeBoolean(jvm);
        out.writeBoolean(threadPool);
-        out.writeBoolean(network);
        out.writeBoolean(fs);
        out.writeBoolean(transport);
        out.writeBoolean(http);

@@ -107,14 +107,6 @@ public class NodesStatsRequestBuilder extends NodesOperationRequestBuilder<Nodes
        return this;
    }

-    /**
-     * Should the node Network stats be returned.
-     */
-    public NodesStatsRequestBuilder setNetwork(boolean network) {
-        request.network(network);
-        return this;
-    }
-
    /**
     * Should the node file system stats be returned.
     */

@@ -79,7 +79,7 @@ public class TransportNodesStatsAction extends TransportNodesAction<NodesStatsRe
    @Override
    protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest) {
        NodesStatsRequest request = nodeStatsRequest.request;
-        return nodeService.stats(request.indices(), request.os(), request.process(), request.jvm(), request.threadPool(), request.network(),
+        return nodeService.stats(request.indices(), request.os(), request.process(), request.jvm(), request.threadPool(),
                request.fs(), request.transport(), request.http(), request.breaker(), request.script());
    }

@@ -99,8 +99,8 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta

    @Override
    protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) {
-        NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, false, true, false, true);
-        NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, false, true, false, false, false, false);
+        NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, true, false, true);
+        NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, true, false, false, false, false);
        List<ShardStats> shardsStats = new ArrayList<>();
        for (IndexService indexService : indicesService) {
            for (IndexShard indexShard : indexService) {

@@ -145,6 +145,10 @@ public class BulkProcessor implements Closeable {
    }

    public static Builder builder(Client client, Listener listener) {
+        if (client == null) {
+            throw new NullPointerException("The client you specified while building a BulkProcessor is null");
+        }
+
        return new Builder(client, listener);
    }

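For context only (not part of the commit): with the guard added above, `BulkProcessor.builder` now rejects a null client immediately with a descriptive `NullPointerException` instead of failing later when the processor is used. A minimal sketch of how a caller would observe this; the class name and the no-op listener are illustrative:

<pre>
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;

public class BulkProcessorNullClientSketch {
    public static void main(String[] args) {
        // A no-op listener; builder() requires one alongside the client.
        BulkProcessor.Listener listener = new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {}

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {}

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {}
        };
        try {
            // The client is deliberately null here; the new check fails fast.
            BulkProcessor.builder(null, listener);
        } catch (NullPointerException e) {
            System.out.println(e.getMessage());
        }
    }
}
</pre>
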
@@ -125,9 +125,9 @@ public class ClusterModule extends AbstractModule {
    private final Settings settings;
    private final DynamicSettings.Builder clusterDynamicSettings = new DynamicSettings.Builder();
    private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder();
-    private final ExtensionPoint.TypeExtensionPoint<ShardsAllocator> shardsAllocators = new ExtensionPoint.TypeExtensionPoint<>("shards_allocator", ShardsAllocator.class);
-    private final ExtensionPoint.SetExtensionPoint<AllocationDecider> allocationDeciders = new ExtensionPoint.SetExtensionPoint<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class);
-    private final ExtensionPoint.SetExtensionPoint<IndexTemplateFilter> indexTemplateFilters = new ExtensionPoint.SetExtensionPoint<>("index_template_filter", IndexTemplateFilter.class);
+    private final ExtensionPoint.SelectedType<ShardsAllocator> shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class);
+    private final ExtensionPoint.ClassSet<AllocationDecider> allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class);
+    private final ExtensionPoint.ClassSet<IndexTemplateFilter> indexTemplateFilters = new ExtensionPoint.ClassSet<>("index_template_filter", IndexTemplateFilter.class);

    // pkg private so tests can mock
    Class<? extends ClusterInfoService> clusterInfoServiceImpl = InternalClusterInfoService.class;
@@ -168,7 +168,7 @@ public class ClusterModule extends AbstractModule {
        registerClusterDynamicSetting(IndicesTTLService.INDICES_TTL_INTERVAL, Validator.TIME);
        registerClusterDynamicSetting(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, Validator.TIME);
        registerClusterDynamicSetting(MetaData.SETTING_READ_ONLY, Validator.EMPTY);
-        registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, Validator.BYTES_SIZE);
+        registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, Validator.POSITIVE_BYTES_SIZE);
        registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS, Validator.INTEGER);
        registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, Validator.BYTES_SIZE);
        registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_COMPRESS, Validator.EMPTY);

@@ -21,6 +21,7 @@ package org.elasticsearch.cluster.node;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
@@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
+import java.net.InetAddress;
import java.util.Map;

import static org.elasticsearch.common.transport.TransportAddressSerializers.addressToStream;
@@ -136,7 +138,7 @@ public class DiscoveryNode implements Streamable, ToXContent {
     * @param version the version of the node.
     */
    public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map<String, String> attributes, Version version) {
-        this(nodeName, nodeId, NetworkUtils.getLocalHostName(""), NetworkUtils.getLocalHostAddress(""), address, attributes, version);
+        this(nodeName, nodeId, address.getHost(), address.getAddress(), address, attributes, version);
    }

    /**

@@ -40,6 +40,8 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringText;
+import org.elasticsearch.common.transport.BoundTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.*;
import org.elasticsearch.discovery.Discovery;
@@ -159,7 +161,8 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
        Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes();
        // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling
        final String nodeId = DiscoveryService.generateNodeId(settings);
-        DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, transportService.boundAddress().publishAddress(), nodeAttributes, version);
+        final TransportAddress publishAddress = transportService.boundAddress().publishAddress();
+        DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, publishAddress, nodeAttributes, version);
        DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id());
        this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build();
        this.transportService.setLocalNode(localNode);

@@ -22,6 +22,7 @@ package org.elasticsearch.cluster.settings;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.Booleans;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;

 import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue;

@@ -228,6 +229,21 @@ public interface Validator {
        }
    };

+    Validator POSITIVE_BYTES_SIZE = new Validator() {
+        @Override
+        public String validate(String setting, String value, ClusterState state) {
+            try {
+                ByteSizeValue byteSizeValue = parseBytesSizeValue(value, setting);
+                if (byteSizeValue.getBytes() <= 0) {
+                    return setting + " must be a positive byte size value";
+                }
+            } catch (ElasticsearchParseException ex) {
+                return ex.getMessage();
+            }
+            return null;
+        }
+    };
+
    Validator PERCENTAGE = new Validator() {
        @Override
        public String validate(String setting, String value, ClusterState clusterState) {
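
For illustration only, a minimal standalone sketch (not part of this change) of the check the new POSITIVE_BYTES_SIZE validator performs; the tiny parser below is a hypothetical stand-in for Elasticsearch's parseBytesSizeValue and only understands plain bytes, "kb" and "mb".

public final class PositiveBytesSizeCheck {

    /** Returns null when the value is acceptable, otherwise an error message. */
    static String validate(String setting, String value) {
        try {
            long bytes = parseBytes(value);
            if (bytes <= 0) {
                return setting + " must be a positive byte size value";
            }
        } catch (IllegalArgumentException ex) {
            return ex.getMessage();
        }
        return null;
    }

    // Hypothetical, simplified byte-size parser used only for this example.
    static long parseBytes(String value) {
        String v = value.trim().toLowerCase(java.util.Locale.ROOT);
        if (v.endsWith("kb")) {
            return Long.parseLong(v.substring(0, v.length() - 2).trim()) * 1024;
        }
        if (v.endsWith("mb")) {
            return Long.parseLong(v.substring(0, v.length() - 2).trim()) * 1024 * 1024;
        }
        return Long.parseLong(v);
    }

    public static void main(String[] args) {
        System.out.println(validate("indices.recovery.chunk_size", "512kb")); // null -> valid
        System.out.println(validate("indices.recovery.chunk_size", "0"));     // error message
    }
}
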
@@ -19,8 +19,6 @@

 package org.elasticsearch.common.geo;

-import org.elasticsearch.common.Classes;
-
 /**
  */
 public class ShapesAvailability {

@@ -48,8 +46,5 @@ public class ShapesAvailability {
        JTS_AVAILABLE = xJTS_AVAILABLE;
    }

-    private ShapesAvailability() {
-
-
-    }
+    private ShapesAvailability() {}
 }
@@ -34,6 +34,31 @@ import static com.google.common.base.Preconditions.checkState;
 * @since 2.0
 */
public final class ProviderLookup<T> implements Element {

+    // NOTE: this class is not part of guice and was added so the provder lookup's key can be acessible for tests
+    public static class ProviderImpl<T> implements Provider<T> {
+        private ProviderLookup<T> lookup;
+
+        private ProviderImpl(ProviderLookup<T> lookup) {
+            this.lookup = lookup;
+        }
+
+        @Override
+        public T get() {
+            checkState(lookup.delegate != null,
+                    "This Provider cannot be used until the Injector has been created.");
+            return lookup.delegate.get();
+        }
+
+        @Override
+        public String toString() {
+            return "Provider<" + lookup.key.getTypeLiteral() + ">";
+        }
+
+        public Key<T> getKey() {
+            return lookup.getKey();
+        }
+    }
    private final Object source;
    private final Key<T> key;
    private Provider<T> delegate;

@@ -86,18 +111,6 @@ public final class ProviderLookup<T> implements Element {
     * IllegalStateException} if you try to use it beforehand.
     */
    public Provider<T> getProvider() {
-        return new Provider<T>() {
-            @Override
-            public T get() {
-                checkState(delegate != null,
-                        "This Provider cannot be used until the Injector has been created.");
-                return delegate.get();
-            }
-
-            @Override
-            public String toString() {
-                return "Provider<" + key.getTypeLiteral() + ">";
-            }
-        };
+        return new ProviderImpl<>(this);
    }
 }
@@ -20,6 +20,7 @@
 package org.elasticsearch.common.logging;

 import com.google.common.collect.Lists;
+import org.apache.lucene.util.SuppressForbidden;
 import org.elasticsearch.common.Classes;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;

@@ -74,20 +75,27 @@ public class Loggers {
        return getLogger(buildClassLoggerName(clazz), settings, prefixes);
    }

+    @SuppressForbidden(reason = "using localhost for logging on which host it is is fine")
+    private static InetAddress getHostAddress() {
+        try {
+            return InetAddress.getLocalHost();
+        } catch (UnknownHostException e) {
+            return null;
+        }
+    }
+
    public static ESLogger getLogger(String loggerName, Settings settings, String... prefixes) {
        List<String> prefixesList = newArrayList();
        if (settings.getAsBoolean("logger.logHostAddress", false)) {
-            try {
-                prefixesList.add(InetAddress.getLocalHost().getHostAddress());
-            } catch (UnknownHostException e) {
-                // ignore
+            final InetAddress addr = getHostAddress();
+            if (addr != null) {
+                prefixesList.add(addr.getHostAddress());
            }
        }
        if (settings.getAsBoolean("logger.logHostName", false)) {
-            try {
-                prefixesList.add(InetAddress.getLocalHost().getHostName());
-            } catch (UnknownHostException e) {
-                // ignore
+            final InetAddress addr = getHostAddress();
+            if (addr != null) {
+                prefixesList.add(addr.getHostName());
            }
        }
        String name = settings.get("name");
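
For illustration only, a small standalone sketch (not from the commit) of the refactored lookup: resolve the local host once, defensively, and only add the prefixes when it actually resolved.

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;

public final class HostPrefixExample {

    // Mirrors the new private Loggers#getHostAddress() helper: swallow the
    // UnknownHostException and signal failure with null instead.
    static InetAddress getHostAddress() {
        try {
            return InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            return null;
        }
    }

    public static void main(String[] args) {
        List<String> prefixes = new ArrayList<>();
        InetAddress addr = getHostAddress();
        if (addr != null) {
            prefixes.add(addr.getHostAddress());
            prefixes.add(addr.getHostName());
        }
        System.out.println(prefixes);
    }
}
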
@@ -28,11 +28,8 @@ import org.elasticsearch.common.unit.TimeValue;

 import java.io.IOException;
 import java.net.InetAddress;
-import java.net.NetworkInterface;
 import java.net.UnknownHostException;
-import java.util.Collection;
 import java.util.List;
-import java.util.Locale;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;

@@ -41,7 +38,8 @@ import java.util.concurrent.TimeUnit;
 */
public class NetworkService extends AbstractComponent {

-    public static final String LOCAL = "#local#";
+    /** By default, we bind to loopback interfaces */
+    public static final String DEFAULT_NETWORK_HOST = "_local_";

    private static final String GLOBAL_NETWORK_HOST_SETTING = "network.host";
    private static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host";

@@ -71,12 +69,12 @@ public class NetworkService extends AbstractComponent {
        /**
         * Resolves the default value if possible. If not, return <tt>null</tt>.
         */
-        InetAddress resolveDefault();
+        InetAddress[] resolveDefault();

        /**
         * Resolves a custom value handling, return <tt>null</tt> if can't handle it.
         */
-        InetAddress resolveIfPossible(String value);
+        InetAddress[] resolveIfPossible(String value);
    }

    private final List<CustomNameResolver> customNameResolvers = new CopyOnWriteArrayList<>();

@@ -94,100 +92,86 @@ public class NetworkService extends AbstractComponent {
        customNameResolvers.add(customNameResolver);
    }

-    public InetAddress resolveBindHostAddress(String bindHost) throws IOException {
-        return resolveBindHostAddress(bindHost, InetAddress.getLoopbackAddress().getHostAddress());
-    }
-
-    public InetAddress resolveBindHostAddress(String bindHost, String defaultValue2) throws IOException {
-        return resolveInetAddress(bindHost, settings.get(GLOBAL_NETWORK_BINDHOST_SETTING, settings.get(GLOBAL_NETWORK_HOST_SETTING)), defaultValue2);
-    }
-
-    public InetAddress resolvePublishHostAddress(String publishHost) throws IOException {
-        InetAddress address = resolvePublishHostAddress(publishHost,
-                InetAddress.getLoopbackAddress().getHostAddress());
-        // verify that its not a local address
-        if (address == null || address.isAnyLocalAddress()) {
-            address = NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv4);
-            if (address == null) {
-                address = NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.getIpStackType());
-                if (address == null) {
-                    address = NetworkUtils.getLocalAddress();
-                    if (address == null) {
-                        return NetworkUtils.getLocalhost(NetworkUtils.StackType.IPv4);
-                    }
-                }
-            }
-        }
-        return address;
-    }
-
-    public InetAddress resolvePublishHostAddress(String publishHost, String defaultValue2) throws IOException {
-        return resolveInetAddress(publishHost, settings.get(GLOBAL_NETWORK_PUBLISHHOST_SETTING, settings.get(GLOBAL_NETWORK_HOST_SETTING)), defaultValue2);
-    }
-
-    public InetAddress resolveInetAddress(String host, String defaultValue1, String defaultValue2) throws UnknownHostException, IOException {
-        if (host == null) {
-            host = defaultValue1;
-        }
-        if (host == null) {
-            host = defaultValue2;
-        }
-        if (host == null) {
-            for (CustomNameResolver customNameResolver : customNameResolvers) {
-                InetAddress inetAddress = customNameResolver.resolveDefault();
-                if (inetAddress != null) {
-                    return inetAddress;
-                }
-            }
-            return null;
-        }
-        String origHost = host;
+    public InetAddress[] resolveBindHostAddress(String bindHost) throws IOException {
+        // first check settings
+        if (bindHost == null) {
+            bindHost = settings.get(GLOBAL_NETWORK_BINDHOST_SETTING, settings.get(GLOBAL_NETWORK_HOST_SETTING));
+        }
+        // next check any registered custom resolvers
+        if (bindHost == null) {
+            for (CustomNameResolver customNameResolver : customNameResolvers) {
+                InetAddress addresses[] = customNameResolver.resolveDefault();
+                if (addresses != null) {
+                    return addresses;
+                }
+            }
+        }
+        // finally, fill with our default
+        if (bindHost == null) {
+            bindHost = DEFAULT_NETWORK_HOST;
+        }
+        return resolveInetAddress(bindHost);
+    }
+
+    // TODO: needs to be InetAddress[]
+    public InetAddress resolvePublishHostAddress(String publishHost) throws IOException {
+        // first check settings
+        if (publishHost == null) {
+            publishHost = settings.get(GLOBAL_NETWORK_PUBLISHHOST_SETTING, settings.get(GLOBAL_NETWORK_HOST_SETTING));
+        }
+        // next check any registered custom resolvers
+        if (publishHost == null) {
+            for (CustomNameResolver customNameResolver : customNameResolvers) {
+                InetAddress addresses[] = customNameResolver.resolveDefault();
+                if (addresses != null) {
+                    return addresses[0];
+                }
+            }
+        }
+        // finally, fill with our default
+        if (publishHost == null) {
+            publishHost = DEFAULT_NETWORK_HOST;
+        }
+        // TODO: allow publishing multiple addresses
+        return resolveInetAddress(publishHost)[0];
+    }
+
+    private InetAddress[] resolveInetAddress(String host) throws UnknownHostException, IOException {
        if ((host.startsWith("#") && host.endsWith("#")) || (host.startsWith("_") && host.endsWith("_"))) {
            host = host.substring(1, host.length() - 1);
-
+            // allow custom resolvers to have special names
            for (CustomNameResolver customNameResolver : customNameResolvers) {
-                InetAddress inetAddress = customNameResolver.resolveIfPossible(host);
-                if (inetAddress != null) {
-                    return inetAddress;
+                InetAddress addresses[] = customNameResolver.resolveIfPossible(host);
+                if (addresses != null) {
+                    return addresses;
                }
            }
-
-            if (host.equals("local")) {
-                return NetworkUtils.getLocalAddress();
-            } else if (host.startsWith("non_loopback")) {
-                if (host.toLowerCase(Locale.ROOT).endsWith(":ipv4")) {
-                    return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv4);
-                } else if (host.toLowerCase(Locale.ROOT).endsWith(":ipv6")) {
-                    return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.StackType.IPv6);
-                } else {
-                    return NetworkUtils.getFirstNonLoopbackAddress(NetworkUtils.getIpStackType());
-                }
-            } else {
-                NetworkUtils.StackType stackType = NetworkUtils.getIpStackType();
-                if (host.toLowerCase(Locale.ROOT).endsWith(":ipv4")) {
-                    stackType = NetworkUtils.StackType.IPv4;
-                    host = host.substring(0, host.length() - 5);
-                } else if (host.toLowerCase(Locale.ROOT).endsWith(":ipv6")) {
-                    stackType = NetworkUtils.StackType.IPv6;
-                    host = host.substring(0, host.length() - 5);
-                }
-                Collection<NetworkInterface> allInterfs = NetworkUtils.getAllAvailableInterfaces();
-                for (NetworkInterface ni : allInterfs) {
-                    if (!ni.isUp()) {
-                        continue;
-                    }
-                    if (host.equals(ni.getName()) || host.equals(ni.getDisplayName())) {
-                        if (ni.isLoopback()) {
-                            return NetworkUtils.getFirstAddress(ni, stackType);
-                        } else {
-                            return NetworkUtils.getFirstNonLoopbackAddress(ni, stackType);
-                        }
-                    }
-                }
-            }
-            throw new IOException("Failed to find network interface for [" + origHost + "]");
-        }
-        return InetAddress.getByName(host);
+            switch (host) {
+                case "local":
+                    return NetworkUtils.getLoopbackAddresses();
+                case "local:ipv4":
+                    return NetworkUtils.filterIPV4(NetworkUtils.getLoopbackAddresses());
+                case "local:ipv6":
+                    return NetworkUtils.filterIPV6(NetworkUtils.getLoopbackAddresses());
+                case "non_loopback":
+                    return NetworkUtils.getFirstNonLoopbackAddresses();
+                case "non_loopback:ipv4":
+                    return NetworkUtils.filterIPV4(NetworkUtils.getFirstNonLoopbackAddresses());
+                case "non_loopback:ipv6":
+                    return NetworkUtils.filterIPV6(NetworkUtils.getFirstNonLoopbackAddresses());
+                default:
+                    /* an interface specification */
+                    if (host.endsWith(":ipv4")) {
+                        host = host.substring(0, host.length() - 5);
+                        return NetworkUtils.filterIPV4(NetworkUtils.getAddressesForInterface(host));
+                    } else if (host.endsWith(":ipv6")) {
+                        host = host.substring(0, host.length() - 5);
+                        return NetworkUtils.filterIPV6(NetworkUtils.getAddressesForInterface(host));
+                    } else {
+                        return NetworkUtils.getAddressesForInterface(host);
+                    }
            }
        }
+        return NetworkUtils.getAllByName(host);
    }
 }
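
For illustration only, a standalone sketch (not from the commit) of how the special host names handled by the new resolveInetAddress() are parsed: strip the wrapping '_' or '#', then peel off an optional ":ipv4"/":ipv6" suffix; anything else falls through to a plain DNS lookup.

public final class HostSpecExample {

    static String describe(String host) {
        if ((host.startsWith("#") && host.endsWith("#")) || (host.startsWith("_") && host.endsWith("_"))) {
            String spec = host.substring(1, host.length() - 1);
            String filter = "all";
            if (spec.endsWith(":ipv4") || spec.endsWith(":ipv6")) {
                filter = spec.substring(spec.length() - 4);
                spec = spec.substring(0, spec.length() - 5);
            }
            return "special name [" + spec + "], filter [" + filter + "]";
        }
        return "regular hostname [" + host + "]";
    }

    public static void main(String[] args) {
        System.out.println(describe("_local_"));              // special name [local], filter [all]
        System.out.println(describe("_non_loopback:ipv4_"));  // special name [non_loopback], filter [ipv4]
        System.out.println(describe("_en0_"));                // special name [en0], filter [all]
        System.out.println(describe("localhost"));            // regular hostname [localhost]
    }
}
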
@@ -19,303 +19,194 @@

 package org.elasticsearch.common.network;

-import com.google.common.collect.Lists;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;

-import java.net.*;
-import java.util.*;
+import java.net.Inet4Address;
+import java.net.Inet6Address;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;

 /**
- *
+ * Utilities for network interfaces / addresses
 */
public abstract class NetworkUtils {

+    /** no instantation */
+    private NetworkUtils() {}
+
+    /**
+     * By default we bind to any addresses on an interface/name, unless restricted by :ipv4 etc.
+     * This property is unrelated to that, this is about what we *publish*. Today the code pretty much
+     * expects one address so this is used for the sort order.
+     * @deprecated transition mechanism only
+     */
+    @Deprecated
+    static final boolean PREFER_V6 = Boolean.parseBoolean(System.getProperty("java.net.preferIPv6Addresses", "false"));
+
+    /** Sorts an address by preference. This way code like publishing can just pick the first one */
+    static int sortKey(InetAddress address, boolean prefer_v6) {
+        int key = address.getAddress().length;
+        if (prefer_v6) {
+            key = -key;
+        }
+
+        if (address.isAnyLocalAddress()) {
+            key += 5;
+        }
+        if (address.isMulticastAddress()) {
+            key += 4;
+        }
+        if (address.isLoopbackAddress()) {
+            key += 3;
+        }
+        if (address.isLinkLocalAddress()) {
+            key += 2;
+        }
+        if (address.isSiteLocalAddress()) {
+            key += 1;
+        }
+
+        return key;
+    }
+
+    /**
+     * Sorts addresses by order of preference. This is used to pick the first one for publishing
+     * @deprecated remove this when multihoming is really correct
+     */
+    @Deprecated
+    private static void sortAddresses(List<InetAddress> list) {
+        Collections.sort(list, new Comparator<InetAddress>() {
+            @Override
+            public int compare(InetAddress left, InetAddress right) {
+                int cmp = Integer.compare(sortKey(left, PREFER_V6), sortKey(right, PREFER_V6));
+                if (cmp == 0) {
+                    cmp = new BytesRef(left.getAddress()).compareTo(new BytesRef(right.getAddress()));
+                }
+                return cmp;
+            }
+        });
+    }
+
    private final static ESLogger logger = Loggers.getLogger(NetworkUtils.class);

-    public static enum StackType {
-        IPv4, IPv6, Unknown
+    /** Return all interfaces (and subinterfaces) on the system */
+    static List<NetworkInterface> getInterfaces() throws SocketException {
+        List<NetworkInterface> all = new ArrayList<>();
+        addAllInterfaces(all, Collections.list(NetworkInterface.getNetworkInterfaces()));
+        Collections.sort(all, new Comparator<NetworkInterface>() {
+            @Override
+            public int compare(NetworkInterface left, NetworkInterface right) {
+                return Integer.compare(left.getIndex(), right.getIndex());
+            }
+        });
+        return all;
    }

-    public static final String IPv4_SETTING = "java.net.preferIPv4Stack";
-    public static final String IPv6_SETTING = "java.net.preferIPv6Addresses";
-
-    public static final String NON_LOOPBACK_ADDRESS = "non_loopback_address";
-
-    private final static InetAddress localAddress;
-
-    static {
-        InetAddress localAddressX;
-        try {
-            localAddressX = InetAddress.getLocalHost();
-        } catch (Throwable e) {
-            logger.warn("failed to resolve local host, fallback to loopback", e);
-            localAddressX = InetAddress.getLoopbackAddress();
+    /** Helper for getInterfaces, recursively adds subinterfaces to {@code target} */
+    private static void addAllInterfaces(List<NetworkInterface> target, List<NetworkInterface> level) {
+        if (!level.isEmpty()) {
+            target.addAll(level);
+            for (NetworkInterface intf : level) {
+                addAllInterfaces(target, Collections.list(intf.getSubInterfaces()));
+            }
        }
-        localAddress = localAddressX;
    }

+    /** Returns system default for SO_REUSEADDR */
    public static boolean defaultReuseAddress() {
        return Constants.WINDOWS ? false : true;
    }

-    public static boolean isIPv4() {
-        return System.getProperty("java.net.preferIPv4Stack") != null && System.getProperty("java.net.preferIPv4Stack").equals("true");
-    }
-
-    public static InetAddress getIPv4Localhost() throws UnknownHostException {
-        return getLocalhost(StackType.IPv4);
-    }
-
-    public static InetAddress getIPv6Localhost() throws UnknownHostException {
-        return getLocalhost(StackType.IPv6);
-    }
-
-    public static InetAddress getLocalAddress() {
-        return localAddress;
-    }
-
-    public static String getLocalHostName(String defaultHostName) {
-        if (localAddress == null) {
-            return defaultHostName;
-        }
-        String hostName = localAddress.getHostName();
-        if (hostName == null) {
-            return defaultHostName;
-        }
-        return hostName;
-    }
-
-    public static String getLocalHostAddress(String defaultHostAddress) {
-        if (localAddress == null) {
-            return defaultHostAddress;
-        }
-        String hostAddress = localAddress.getHostAddress();
-        if (hostAddress == null) {
-            return defaultHostAddress;
-        }
-        return hostAddress;
-    }
-
-    public static InetAddress getLocalhost(StackType ip_version) throws UnknownHostException {
-        if (ip_version == StackType.IPv4)
-            return InetAddress.getByName("127.0.0.1");
-        else
-            return InetAddress.getByName("::1");
-    }
-
-    /**
-     * Returns the first non-loopback address on any interface on the current host.
-     *
-     * @param ip_version Constraint on IP version of address to be returned, 4 or 6
-     */
-    public static InetAddress getFirstNonLoopbackAddress(StackType ip_version) throws SocketException {
-        InetAddress address;
+    /** Returns addresses for all loopback interfaces that are up. */
+    public static InetAddress[] getLoopbackAddresses() throws SocketException {
+        List<InetAddress> list = new ArrayList<>();
        for (NetworkInterface intf : getInterfaces()) {
-            try {
-                if (!intf.isUp() || intf.isLoopback())
-                    continue;
-            } catch (Exception e) {
-                // might happen when calling on a network interface that does not exists
-                continue;
-            }
-            address = getFirstNonLoopbackAddress(intf, ip_version);
-            if (address != null) {
-                return address;
+            if (intf.isLoopback() && intf.isUp()) {
+                list.addAll(Collections.list(intf.getInetAddresses()));
            }
        }
-        return null;
-    }
-
-    private static List<NetworkInterface> getInterfaces() throws SocketException {
-        Enumeration intfs = NetworkInterface.getNetworkInterfaces();
-
-        List<NetworkInterface> intfsList = Lists.newArrayList();
-        while (intfs.hasMoreElements()) {
-            intfsList.add((NetworkInterface) intfs.nextElement());
+        if (list.isEmpty()) {
+            throw new IllegalArgumentException("No up-and-running loopback interfaces found, got " + getInterfaces());
        }
-        sortInterfaces(intfsList);
-        return intfsList;
+        sortAddresses(list);
+        return list.toArray(new InetAddress[list.size()]);
    }

-    private static void sortInterfaces(List<NetworkInterface> intfsList) {
-        // order by index, assuming first ones are more interesting
-        CollectionUtil.timSort(intfsList, new Comparator<NetworkInterface>() {
-            @Override
-            public int compare(NetworkInterface o1, NetworkInterface o2) {
-                return Integer.compare (o1.getIndex(), o2.getIndex());
+    /** Returns addresses for the first non-loopback interface that is up. */
+    public static InetAddress[] getFirstNonLoopbackAddresses() throws SocketException {
+        List<InetAddress> list = new ArrayList<>();
+        for (NetworkInterface intf : getInterfaces()) {
+            if (intf.isLoopback() == false && intf.isUp()) {
+                list.addAll(Collections.list(intf.getInetAddresses()));
+                break;
            }
-        });
-    }
-
-    /**
-     * Returns the first non-loopback address on the given interface on the current host.
-     *
-     * @param intf      the interface to be checked
-     * @param ipVersion Constraint on IP version of address to be returned, 4 or 6
-     */
-    public static InetAddress getFirstNonLoopbackAddress(NetworkInterface intf, StackType ipVersion) throws SocketException {
-        if (intf == null)
-            throw new IllegalArgumentException("Network interface pointer is null");
-
-        for (Enumeration addresses = intf.getInetAddresses(); addresses.hasMoreElements(); ) {
-            InetAddress address = (InetAddress) addresses.nextElement();
-            if (!address.isLoopbackAddress()) {
-                if ((address instanceof Inet4Address && ipVersion == StackType.IPv4) ||
-                        (address instanceof Inet6Address && ipVersion == StackType.IPv6))
-                    return address;
-            }
        }
-        return null;
-    }
-
-    /**
-     * Returns the first address with the proper ipVersion on the given interface on the current host.
-     *
-     * @param intf      the interface to be checked
-     * @param ipVersion Constraint on IP version of address to be returned, 4 or 6
-     */
-    public static InetAddress getFirstAddress(NetworkInterface intf, StackType ipVersion) throws SocketException {
-        if (intf == null)
-            throw new IllegalArgumentException("Network interface pointer is null");
-
-        for (Enumeration addresses = intf.getInetAddresses(); addresses.hasMoreElements(); ) {
-            InetAddress address = (InetAddress) addresses.nextElement();
-            if ((address instanceof Inet4Address && ipVersion == StackType.IPv4) ||
-                    (address instanceof Inet6Address && ipVersion == StackType.IPv6))
-                return address;
+        if (list.isEmpty()) {
+            throw new IllegalArgumentException("No up-and-running non-loopback interfaces found, got " + getInterfaces());
        }
-        return null;
+        sortAddresses(list);
+        return list.toArray(new InetAddress[list.size()]);
    }

-    /**
-     * A function to check if an interface supports an IP version (i.e has addresses
-     * defined for that IP version).
-     *
-     * @param intf
-     * @return
-     */
-    public static boolean interfaceHasIPAddresses(NetworkInterface intf, StackType ipVersion) throws SocketException, UnknownHostException {
-        boolean supportsVersion = false;
-        if (intf != null) {
-            // get all the InetAddresses defined on the interface
-            Enumeration addresses = intf.getInetAddresses();
-            while (addresses != null && addresses.hasMoreElements()) {
-                // get the next InetAddress for the current interface
-                InetAddress address = (InetAddress) addresses.nextElement();
-
-                // check if we find an address of correct version
-                if ((address instanceof Inet4Address && (ipVersion == StackType.IPv4)) ||
-                        (address instanceof Inet6Address && (ipVersion == StackType.IPv6))) {
-                    supportsVersion = true;
-                    break;
-                }
-            }
-        } else {
-            throw new UnknownHostException("network interface not found");
+    /** Returns addresses for the given interface (it must be marked up) */
+    public static InetAddress[] getAddressesForInterface(String name) throws SocketException {
+        NetworkInterface intf = NetworkInterface.getByName(name);
+        if (intf == null) {
+            throw new IllegalArgumentException("No interface named '" + name + "' found, got " + getInterfaces());
        }
-        return supportsVersion;
+        if (!intf.isUp()) {
+            throw new IllegalArgumentException("Interface '" + name + "' is not up and running");
+        }
+        List<InetAddress> list = Collections.list(intf.getInetAddresses());
+        if (list.isEmpty()) {
+            throw new IllegalArgumentException("Interface '" + name + "' has no internet addresses");
+        }
+        sortAddresses(list);
+        return list.toArray(new InetAddress[list.size()]);
    }

-    /**
-     * Tries to determine the type of IP stack from the available interfaces and their addresses and from the
-     * system properties (java.net.preferIPv4Stack and java.net.preferIPv6Addresses)
-     *
-     * @return StackType.IPv4 for an IPv4 only stack, StackYTypeIPv6 for an IPv6 only stack, and StackType.Unknown
-     * if the type cannot be detected
-     */
-    public static StackType getIpStackType() {
-        boolean isIPv4StackAvailable = isStackAvailable(true);
-        boolean isIPv6StackAvailable = isStackAvailable(false);
-
-        // if only IPv4 stack available
-        if (isIPv4StackAvailable && !isIPv6StackAvailable) {
-            return StackType.IPv4;
-        }
-        // if only IPv6 stack available
-        else if (isIPv6StackAvailable && !isIPv4StackAvailable) {
-            return StackType.IPv6;
-        }
-        // if dual stack
-        else if (isIPv4StackAvailable && isIPv6StackAvailable) {
-            // get the System property which records user preference for a stack on a dual stack machine
-            if (Boolean.getBoolean(IPv4_SETTING)) // has preference over java.net.preferIPv6Addresses
-                return StackType.IPv4;
-            if (Boolean.getBoolean(IPv6_SETTING))
-                return StackType.IPv6;
-            return StackType.IPv6;
-        }
-        return StackType.Unknown;
+    /** Returns addresses for the given host, sorted by order of preference */
+    public static InetAddress[] getAllByName(String host) throws UnknownHostException {
+        InetAddress addresses[] = InetAddress.getAllByName(host);
+        sortAddresses(Arrays.asList(addresses));
+        return addresses;
    }

-    public static boolean isStackAvailable(boolean ipv4) {
-        Collection<InetAddress> allAddrs = getAllAvailableAddresses();
-        for (InetAddress addr : allAddrs)
-            if (ipv4 && addr instanceof Inet4Address || (!ipv4 && addr instanceof Inet6Address))
-                return true;
-        return false;
-    }
-
-    /**
-     * Returns all the available interfaces, including first level sub interfaces.
-     */
-    public static List<NetworkInterface> getAllAvailableInterfaces() throws SocketException {
-        List<NetworkInterface> allInterfaces = new ArrayList<>();
-        for (Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces(); interfaces.hasMoreElements(); ) {
-            NetworkInterface intf = interfaces.nextElement();
-            allInterfaces.add(intf);
-
-            Enumeration<NetworkInterface> subInterfaces = intf.getSubInterfaces();
-            if (subInterfaces != null && subInterfaces.hasMoreElements()) {
-                while (subInterfaces.hasMoreElements()) {
-                    allInterfaces.add(subInterfaces.nextElement());
-                }
+    /** Returns only the IPV4 addresses in {@code addresses} */
+    public static InetAddress[] filterIPV4(InetAddress addresses[]) {
+        List<InetAddress> list = new ArrayList<>();
+        for (InetAddress address : addresses) {
+            if (address instanceof Inet4Address) {
+                list.add(address);
            }
        }
-        sortInterfaces(allInterfaces);
-        return allInterfaces;
+        if (list.isEmpty()) {
+            throw new IllegalArgumentException("No ipv4 addresses found in " + Arrays.toString(addresses));
+        }
+        return list.toArray(new InetAddress[list.size()]);
    }

-    public static Collection<InetAddress> getAllAvailableAddresses() {
-        // we want consistent order here.
-        final Set<InetAddress> retval = new TreeSet<>(new Comparator<InetAddress>() {
-            BytesRef left = new BytesRef();
-            BytesRef right = new BytesRef();
-            @Override
-            public int compare(InetAddress o1, InetAddress o2) {
-                return set(left, o1).compareTo(set(right, o1));
-            }
-
-            private BytesRef set(BytesRef ref, InetAddress addr) {
-                ref.bytes = addr.getAddress();
-                ref.offset = 0;
-                ref.length = ref.bytes.length;
-                return ref;
-            }
-        });
-        try {
-            for (NetworkInterface intf : getInterfaces()) {
-                Enumeration<InetAddress> addrs = intf.getInetAddresses();
-                while (addrs.hasMoreElements())
-                    retval.add(addrs.nextElement());
-            }
-        } catch (SocketException e) {
-            logger.warn("Failed to derive all available interfaces", e);
-        }
-
-        return retval;
-    }
-
-    private NetworkUtils() {
+    /** Returns only the IPV6 addresses in {@code addresses} */
+    public static InetAddress[] filterIPV6(InetAddress addresses[]) {
+        List<InetAddress> list = new ArrayList<>();
+        for (InetAddress address : addresses) {
+            if (address instanceof Inet6Address) {
+                list.add(address);
+            }
+        }
+        if (list.isEmpty()) {
+            throw new IllegalArgumentException("No ipv6 addresses found in " + Arrays.toString(addresses));
+        }
+        return list.toArray(new InetAddress[list.size()]);
    }
 }
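
For illustration only, a standalone sketch (not from the commit) showing the effect of the new sortKey()/sortAddresses() preference order: site-local beats loopback, and IPv4 is preferred over IPv6 unless java.net.preferIPv6Addresses is set. The scoring below mirrors the idea, slightly simplified.

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public final class AddressPreferenceExample {

    // Same scoring idea as NetworkUtils#sortKey: shorter (IPv4) addresses first,
    // and "less useful" address classes pushed to the back.
    static int sortKey(InetAddress address, boolean preferV6) {
        int key = address.getAddress().length;
        if (preferV6) {
            key = -key;
        }
        if (address.isAnyLocalAddress()) {
            key += 5;
        }
        if (address.isLoopbackAddress()) {
            key += 3;
        }
        if (address.isLinkLocalAddress()) {
            key += 2;
        }
        if (address.isSiteLocalAddress()) {
            key += 1;
        }
        return key;
    }

    public static void main(String[] args) throws UnknownHostException {
        List<InetAddress> list = new ArrayList<>();
        list.add(InetAddress.getByName("::1"));           // IPv6 loopback
        list.add(InetAddress.getByName("127.0.0.1"));     // IPv4 loopback
        list.add(InetAddress.getByName("192.168.1.10"));  // IPv4 site-local
        Collections.sort(list, new Comparator<InetAddress>() {
            @Override
            public int compare(InetAddress left, InetAddress right) {
                return Integer.compare(sortKey(left, false), sortKey(right, false));
            }
        });
        System.out.println(list); // 192.168.1.10 first, ::1 last
    }
}
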
@@ -44,6 +44,21 @@ public class DummyTransportAddress implements TransportAddress {
        return other == INSTANCE;
    }

+    @Override
+    public String getHost() {
+        return "dummy";
+    }
+
+    @Override
+    public String getAddress() {
+        return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
+    }
+
+    @Override
+    public int getPort() {
+        return 42;
+    }
+
    @Override
    public DummyTransportAddress readFrom(StreamInput in) throws IOException {
        return INSTANCE;
@@ -30,7 +30,7 @@ import java.net.InetSocketAddress;
/**
 * A transport address used for IP socket address (wraps {@link java.net.InetSocketAddress}).
 */
-public class InetSocketTransportAddress implements TransportAddress {
+public final class InetSocketTransportAddress implements TransportAddress {

    private static boolean resolveAddress = false;

@@ -92,6 +92,21 @@ public class InetSocketTransportAddress implements TransportAddress {
                address.getAddress().equals(((InetSocketTransportAddress) other).address.getAddress());
    }

+    @Override
+    public String getHost() {
+        return address.getHostName();
+    }
+
+    @Override
+    public String getAddress() {
+        return address.getAddress().getHostAddress();
+    }
+
+    @Override
+    public int getPort() {
+        return address.getPort();
+    }
+
    public InetSocketAddress address() {
        return this.address;
    }
@@ -29,7 +29,7 @@ import java.io.IOException;
/**
 *
 */
-public class LocalTransportAddress implements TransportAddress {
+public final class LocalTransportAddress implements TransportAddress {

    public static final LocalTransportAddress PROTO = new LocalTransportAddress("_na");

@@ -57,6 +57,21 @@ public class LocalTransportAddress implements TransportAddress {
        return other instanceof LocalTransportAddress && id.equals(((LocalTransportAddress) other).id);
    }

+    @Override
+    public String getHost() {
+        return "local";
+    }
+
+    @Override
+    public String getAddress() {
+        return "0.0.0.0"; // see https://en.wikipedia.org/wiki/0.0.0.0
+    }
+
+    @Override
+    public int getPort() {
+        return 0;
+    }
+
    @Override
    public LocalTransportAddress readFrom(StreamInput in) throws IOException {
        return new LocalTransportAddress(in);
@@ -28,7 +28,24 @@ import org.elasticsearch.common.io.stream.Writeable;
 */
public interface TransportAddress extends Writeable<TransportAddress> {

+    /**
+     * Returns the host string for this transport address
+     */
+    String getHost();
+
+    /**
+     * Returns the address string for this transport address
+     */
+    String getAddress();
+
+    /**
+     * Returns the port of this transport address if applicable
+     */
+    int getPort();
+
    short uniqueAddressTypeId();

    boolean sameHost(TransportAddress other);

+    public String toString();
 }
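
For illustration only, a standalone sketch (not from the commit): with the new accessors, calling code can format any transport address generically. The tiny interface below merely mirrors the three getters added above so the example compiles on its own.

public final class AddressFormatExample {

    interface HasAddress {
        String getHost();
        String getAddress();
        int getPort();
    }

    static String format(HasAddress a) {
        return a.getHost() + "/" + a.getAddress() + ":" + a.getPort();
    }

    public static void main(String[] args) {
        HasAddress local = new HasAddress() {
            @Override public String getHost() { return "local"; }
            @Override public String getAddress() { return "0.0.0.0"; }
            @Override public int getPort() { return 0; }
        };
        System.out.println(format(local)); // local/0.0.0.0:0
    }
}
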
@@ -31,21 +31,18 @@ import java.util.*;
 * all extensions by a single name and ensures that extensions are not registered
 * more than once.
 */
-public abstract class ExtensionPoint<T> {
+public abstract class ExtensionPoint {
    protected final String name;
-    protected final Class<T> extensionClass;
    protected final Class<?>[] singletons;

    /**
     * Creates a new extension point
     *
     * @param name           the human readable underscore case name of the extension point. This is used in error messages etc.
-     * @param extensionClass the base class that should be extended
     * @param singletons     a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
     */
-    public ExtensionPoint(String name, Class<T> extensionClass, Class<?>... singletons) {
+    public ExtensionPoint(String name, Class<?>... singletons) {
        this.name = name;
-        this.extensionClass = extensionClass;
        this.singletons = singletons;
    }

@@ -62,29 +59,30 @@ public abstract class ExtensionPoint<T> {
    }

    /**
-     * Subclasses can bind their type, map or set exentions here.
+     * Subclasses can bind their type, map or set extensions here.
     */
    protected abstract void bindExtensions(Binder binder);

    /**
     * A map based extension point which allows to register keyed implementations ie. parsers or some kind of strategies.
     */
-    public static class MapExtensionPoint<T> extends ExtensionPoint<T> {
+    public static class ClassMap<T> extends ExtensionPoint {
+        protected final Class<T> extensionClass;
        private final Map<String, Class<? extends T>> extensions = new HashMap<>();
        private final Set<String> reservedKeys;

        /**
-         * Creates a new {@link org.elasticsearch.common.util.ExtensionPoint.MapExtensionPoint}
+         * Creates a new {@link ClassMap}
         *
         * @param name           the human readable underscore case name of the extension poing. This is used in error messages etc.
         * @param extensionClass the base class that should be extended
         * @param singletons     a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
         * @param reservedKeys   a set of reserved keys by internal implementations
         */
-        public MapExtensionPoint(String name, Class<T> extensionClass, Set<String> reservedKeys, Class<?>... singletons) {
-            super(name, extensionClass, singletons);
+        public ClassMap(String name, Class<T> extensionClass, Set<String> reservedKeys, Class<?>... singletons) {
+            super(name, singletons);
+            this.extensionClass = extensionClass;
            this.reservedKeys = reservedKeys;
        }

        /**

@@ -118,13 +116,13 @@ public abstract class ExtensionPoint<T> {
    }

    /**
-     * A Type extension point which basically allows to registerd keyed extensions like {@link org.elasticsearch.common.util.ExtensionPoint.MapExtensionPoint}
+     * A Type extension point which basically allows to registerd keyed extensions like {@link ClassMap}
     * but doesn't instantiate and bind all the registered key value pairs but instead replace a singleton based on a given setting via {@link #bindType(Binder, Settings, String, String)}
     * Note: {@link #bind(Binder)} is not supported by this class
     */
-    public static final class TypeExtensionPoint<T> extends MapExtensionPoint<T> {
+    public static final class SelectedType<T> extends ClassMap<T> {

-        public TypeExtensionPoint(String name, Class<T> extensionClass) {
+        public SelectedType(String name, Class<T> extensionClass) {
            super(name, extensionClass, Collections.EMPTY_SET);
        }

@@ -156,18 +154,20 @@ public abstract class ExtensionPoint<T> {
    /**
     * A set based extension point which allows to register extended classes that might be used to chain additional functionality etc.
     */
-    public final static class SetExtensionPoint<T> extends ExtensionPoint<T> {
+    public final static class ClassSet<T> extends ExtensionPoint {
+        protected final Class<T> extensionClass;
        private final Set<Class<? extends T>> extensions = new HashSet<>();

        /**
-         * Creates a new {@link org.elasticsearch.common.util.ExtensionPoint.SetExtensionPoint}
+         * Creates a new {@link ClassSet}
         *
         * @param name           the human readable underscore case name of the extension poing. This is used in error messages etc.
         * @param extensionClass the base class that should be extended
         * @param singletons     a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
         */
-        public SetExtensionPoint(String name, Class<T> extensionClass, Class<?>... singletons) {
-            super(name, extensionClass, singletons);
+        public ClassSet(String name, Class<T> extensionClass, Class<?>... singletons) {
+            super(name, singletons);
+            this.extensionClass = extensionClass;
        }

        /**

@@ -191,4 +191,46 @@ public abstract class ExtensionPoint<T> {
        }
    }
    }
+
+    /**
+     * A an instance of a map, mapping one instance value to another. Both key and value are instances, not classes
+     * like with other extension points.
+     */
+    public final static class InstanceMap<K, V> extends ExtensionPoint {
+        private final Map<K, V> map = new HashMap<>();
+        private final Class<K> keyType;
+        private final Class<V> valueType;
+
+        /**
+         * Creates a new {@link ClassSet}
+         *
+         * @param name       the human readable underscore case name of the extension point. This is used in error messages.
+         * @param singletons a list of singletons to bind with this extension point - these are bound in {@link #bind(Binder)}
+         */
+        public InstanceMap(String name, Class<K> keyType, Class<V> valueType, Class<?>... singletons) {
+            super(name, singletons);
+            this.keyType = keyType;
+            this.valueType = valueType;
+        }
+
+        /**
+         * Registers a mapping from {@param key} to {@param value}
+         *
+         * @throws IllegalArgumentException iff the key is already registered
+         */
+        public final void registerExtension(K key, V value) {
+            V old = map.put(key, value);
+            if (old != null) {
+                throw new IllegalArgumentException("Cannot register [" + this.name + "] with key [" + key + "] to [" + value + "], already registered to [" + old + "]");
+            }
+        }
+
+        @Override
+        protected void bindExtensions(Binder binder) {
+            MapBinder<K, V> mapBinder = MapBinder.newMapBinder(binder, keyType, valueType);
+            for (Map.Entry<K, V> entry : map.entrySet()) {
+                mapBinder.addBinding(entry.getKey()).toInstance(entry.getValue());
+            }
+        }
+    }
 }
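
For illustration only, a standalone sketch (not from the commit) of the registration contract InstanceMap enforces: a key can be registered exactly once, and a second registration fails loudly.

import java.util.HashMap;
import java.util.Map;

public final class InstanceRegistryExample<K, V> {

    private final Map<K, V> map = new HashMap<>();

    // Reject duplicate registrations, mirroring InstanceMap#registerExtension.
    public void registerExtension(K key, V value) {
        V old = map.put(key, value);
        if (old != null) {
            throw new IllegalArgumentException(
                    "Cannot register key [" + key + "] to [" + value + "], already registered to [" + old + "]");
        }
    }

    public static void main(String[] args) {
        InstanceRegistryExample<String, String> registry = new InstanceRegistryExample<>();
        registry.registerExtension("query_cache", "index");
        try {
            registry.registerExtension("query_cache", "none"); // duplicate -> rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
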
@@ -131,7 +131,9 @@ public class MulticastZenPing extends AbstractLifecycleComponent<ZenPing> implem
            boolean deferToInterface = settings.getAsBoolean("discovery.zen.ping.multicast.defer_group_to_set_interface", Constants.MAC_OS_X);
            multicastChannel = MulticastChannel.getChannel(nodeName(), shared,
                    new MulticastChannel.Config(port, group, bufferSize, ttl,
-                            networkService.resolvePublishHostAddress(address),
+                            // don't use publish address, the use case for that is e.g. a firewall or proxy and
+                            // may not even be bound to an interface on this machine! use the first bound address.
+                            networkService.resolveBindHostAddress(address)[0],
                            deferToInterface),
                    new Receiver());
        } catch (Throwable t) {
@@ -51,6 +51,10 @@ import org.jboss.netty.handler.timeout.ReadTimeoutException;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicReference;

@@ -128,7 +132,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer

    protected volatile BoundTransportAddress boundAddress;

-    protected volatile Channel serverChannel;
+    protected volatile List<Channel> serverChannels = new ArrayList<>();

    protected OpenChannelsHandler serverOpenChannels;

@@ -243,33 +247,18 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
        serverBootstrap.setOption("child.reuseAddress", reuseAddress);

        // Bind and start to accept incoming connections.
-        InetAddress hostAddressX;
+        InetAddress hostAddresses[];
        try {
-            hostAddressX = networkService.resolveBindHostAddress(bindHost);
+            hostAddresses = networkService.resolveBindHostAddress(bindHost);
        } catch (IOException e) {
            throw new BindHttpException("Failed to resolve host [" + bindHost + "]", e);
        }
-        final InetAddress hostAddress = hostAddressX;
-
-        PortsRange portsRange = new PortsRange(port);
-        final AtomicReference<Exception> lastException = new AtomicReference<>();
-        boolean success = portsRange.iterate(new PortsRange.PortCallback() {
-            @Override
-            public boolean onPortNumber(int portNumber) {
-                try {
-                    serverChannel = serverBootstrap.bind(new InetSocketAddress(hostAddress, portNumber));
-                } catch (Exception e) {
-                    lastException.set(e);
-                    return false;
-                }
-                return true;
-            }
-        });
-        if (!success) {
-            throw new BindHttpException("Failed to bind to [" + port + "]", lastException.get());
+
+        for (InetAddress address : hostAddresses) {
+            bindAddress(address);
        }

-        InetSocketAddress boundAddress = (InetSocketAddress) serverChannel.getLocalAddress();
+        InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(0).getLocalAddress();
        InetSocketAddress publishAddress;
        if (0 == publishPort) {
            publishPort = boundAddress.getPort();

@@ -281,12 +270,42 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
        }
        this.boundAddress = new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress));
    }

+    private void bindAddress(final InetAddress hostAddress) {
+        PortsRange portsRange = new PortsRange(port);
+        final AtomicReference<Exception> lastException = new AtomicReference<>();
+        final AtomicReference<SocketAddress> boundSocket = new AtomicReference<>();
+        boolean success = portsRange.iterate(new PortsRange.PortCallback() {
+            @Override
+            public boolean onPortNumber(int portNumber) {
+                try {
+                    synchronized (serverChannels) {
+                        Channel channel = serverBootstrap.bind(new InetSocketAddress(hostAddress, portNumber));
+                        serverChannels.add(channel);
+                        boundSocket.set(channel.getLocalAddress());
+                    }
+                } catch (Exception e) {
+                    lastException.set(e);
+                    return false;
+                }
+                return true;
+            }
+        });
+        if (!success) {
+            throw new BindHttpException("Failed to bind to [" + port + "]", lastException.get());
+        }
+        logger.info("Bound http to address [{}]", boundSocket.get());
+    }
+
    @Override
    protected void doStop() {
-        if (serverChannel != null) {
-            serverChannel.close().awaitUninterruptibly();
-            serverChannel = null;
+        synchronized (serverChannels) {
+            if (serverChannels != null) {
+                for (Channel channel : serverChannels) {
+                    channel.close().awaitUninterruptibly();
+                }
+                serverChannels = null;
+            }
        }

        if (serverOpenChannels != null) {
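
For illustration only, a standalone sketch (not from the commit) of the bind loop the new bindAddress() performs per address: walk a port range and keep the first port that binds, remembering the last failure for the error message. A plain ServerSocket stands in for the Netty bootstrap here.

import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;

public final class PortRangeBindExample {

    static ServerSocket bindFirstFree(InetAddress host, int fromPort, int toPort) throws IOException {
        IOException lastException = null;
        for (int port = fromPort; port <= toPort; port++) {
            ServerSocket socket = new ServerSocket();
            try {
                socket.bind(new InetSocketAddress(host, port));
                return socket; // success on this port
            } catch (IOException e) {
                socket.close();
                lastException = e; // try the next port in the range
            }
        }
        throw new IOException("Failed to bind to [" + fromPort + "-" + toPort + "]", lastException);
    }

    public static void main(String[] args) throws IOException {
        try (ServerSocket socket = bindFirstFree(InetAddress.getLoopbackAddress(), 9200, 9300)) {
            System.out.println("Bound http-like socket to " + socket.getLocalSocketAddress());
        }
    }
}
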
@@ -20,8 +20,8 @@
 package org.elasticsearch.index.cache;

 import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.inject.Scopes;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.ExtensionPoint;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.cache.query.QueryCache;
 import org.elasticsearch.index.cache.query.index.IndexQueryCache;

@@ -35,24 +35,24 @@ public class IndexCacheModule extends AbstractModule {
     // for test purposes only
     public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";

-    private final Settings settings;
+    private final Settings indexSettings;
+    private final ExtensionPoint.SelectedType<QueryCache> queryCaches;

     public IndexCacheModule(Settings settings) {
-        this.settings = settings;
+        this.indexSettings = settings;
+        this.queryCaches = new ExtensionPoint.SelectedType<>("query_cache", QueryCache.class);
+
+        registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache.class);
+        registerQueryCache(NONE_QUERY_CACHE, NoneQueryCache.class);
+    }
+
+    public void registerQueryCache(String name, Class<? extends QueryCache> clazz) {
+        queryCaches.registerExtension(name, clazz);
     }

     @Override
     protected void configure() {
-        String queryCacheType = settings.get(QUERY_CACHE_TYPE, INDEX_QUERY_CACHE);
-        Class<? extends QueryCache> queryCacheImpl;
-        if (queryCacheType.equals(INDEX_QUERY_CACHE)) {
-            queryCacheImpl = IndexQueryCache.class;
-        } else if (queryCacheType.equals(NONE_QUERY_CACHE)) {
-            queryCacheImpl = NoneQueryCache.class;
-        } else {
-            throw new IllegalArgumentException("Unknown QueryCache type [" + queryCacheType + "]");
-        }
-        bind(QueryCache.class).to(queryCacheImpl).in(Scopes.SINGLETON);
+        queryCaches.bindType(binder(), indexSettings, QUERY_CACHE_TYPE, INDEX_QUERY_CACHE);
         bind(BitsetFilterCache.class).asEagerSingleton();
         bind(IndexCache.class).asEagerSingleton();
     }
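The rewritten module above stops hard-coding the query-cache choice in an if/else chain and instead registers named implementations that a settings key selects. A generic sketch of that "selected extension" idea (not the Elasticsearch ExtensionPoint API itself; the class and method names below are invented):

<pre>
import java.util.HashMap;
import java.util.Map;

// Generic illustration of a "selected extension" registry: implementations
// register under a name, and a settings key picks which one is used,
// falling back to a default.
public class SelectedTypeRegistry<T> {

    private final String settingKey;
    private final Map<String, Class<? extends T>> extensions = new HashMap<>();

    public SelectedTypeRegistry(String settingKey) {
        this.settingKey = settingKey;
    }

    public void registerExtension(String name, Class<? extends T> type) {
        extensions.put(name, type);
    }

    public Class<? extends T> selectType(Map<String, String> settings, String defaultName) {
        String name = settings.getOrDefault(settingKey, defaultName);
        Class<? extends T> type = extensions.get(name);
        if (type == null) {
            throw new IllegalArgumentException("Unknown " + settingKey + " type [" + name + "]");
        }
        return type;
    }
}
</pre>

A module in the style of the hunk above would register its "index" and "none" implementations and bind whatever class the registry selects as a singleton.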
@@ -36,10 +36,12 @@ public interface IndexSearcherWrapper {
     DirectoryReader wrap(DirectoryReader reader);

     /**
-     * @param searcher The provided index searcher to be wrapped to add custom functionality
+     * @param engineConfig The engine config which can be used to get the query cache and query cache policy from
+     *                     when creating a new index searcher
+     * @param searcher      The provided index searcher to be wrapped to add custom functionality
      * @return a new index searcher wrapping the provided index searcher or if no wrapping was performed
      * the provided index searcher
      */
-    IndexSearcher wrap(IndexSearcher searcher) throws EngineException;
+    IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException;

 }
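The interface change above threads the EngineConfig into wrap so an implementation can pick up the query cache and caching policy when it builds a replacement searcher. A minimal no-op implementation is sketched below; the two wrap signatures come from the hunk above, but the import locations are assumptions:

<pre>
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.engine.EngineConfig;         // assumed package
import org.elasticsearch.index.engine.EngineException;      // assumed package
import org.elasticsearch.index.engine.IndexSearcherWrapper; // assumed package

// A no-op wrapper: both methods simply hand back what they were given.
public class NoOpIndexSearcherWrapper implements IndexSearcherWrapper {

    @Override
    public DirectoryReader wrap(DirectoryReader reader) {
        return reader; // nothing to filter, return the reader unchanged
    }

    @Override
    public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException {
        // A real wrapper would build a new IndexSearcher here and copy the
        // query cache and caching policy from engineConfig onto it.
        return searcher;
    }
}
</pre>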
@@ -77,7 +77,7 @@ public final class IndexSearcherWrappingService {
         // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point
         // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten
         // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times
-        IndexSearcher indexSearcher = wrapper.wrap(innerIndexSearcher);
+        IndexSearcher indexSearcher = wrapper.wrap(engineConfig, innerIndexSearcher);
         if (reader == engineSearcher.reader() && indexSearcher == innerIndexSearcher) {
             return engineSearcher;
         } else {
@@ -101,8 +101,7 @@ public class DocumentMapperParser {
                 .put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser())
                 .put(TypeParsers.MULTI_FIELD_CONTENT_TYPE, TypeParsers.multiFieldConverterTypeParser)
                 .put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser())
-                .put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser())
-                .put(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser());
+                .put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser());

         if (ShapesAvailability.JTS_AVAILABLE) {
             typeParsersBuilder.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
@@ -84,10 +84,6 @@ public final class MapperBuilders {
         return new LongFieldMapper.Builder(name);
     }

-    public static Murmur3FieldMapper.Builder murmur3Field(String name) {
-        return new Murmur3FieldMapper.Builder(name);
-    }
-
     public static FloatFieldMapper.Builder floatField(String name) {
         return new FloatFieldMapper.Builder(name);
     }
@@ -85,6 +85,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         public static final String LON_SUFFIX = "." + LON;
         public static final String GEOHASH = "geohash";
         public static final String GEOHASH_SUFFIX = "." + GEOHASH;
+        public static final String IGNORE_MALFORMED = "ignore_malformed";
+        public static final String COERCE = "coerce";
     }

     public static class Defaults {

@@ -93,10 +95,9 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         public static final boolean ENABLE_GEOHASH = false;
         public static final boolean ENABLE_GEOHASH_PREFIX = false;
         public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
-        public static final boolean NORMALIZE_LAT = true;
-        public static final boolean NORMALIZE_LON = true;
-        public static final boolean VALIDATE_LAT = true;
-        public static final boolean VALIDATE_LON = true;
+        public static final boolean IGNORE_MALFORMED = false;
+        public static final boolean COERCE = false;

         public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();

@@ -215,6 +216,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         @Override
         public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
             Builder builder = geoPointField(name);
+            final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
             parseField(builder, name, node, parserContext);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();

@@ -245,25 +247,42 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
                     builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString()));
                 }
                 iterator.remove();
-            } else if (fieldName.equals("validate")) {
-                builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
-                builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (fieldName.equals(Names.IGNORE_MALFORMED)) {
+                if (builder.fieldType().coerce == false) {
+                    builder.fieldType().ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode);
+                }
                 iterator.remove();
-            } else if (fieldName.equals("validate_lon")) {
-                builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (indexCreatedBeforeV2_0 && fieldName.equals("validate")) {
+                if (builder.fieldType().ignoreMalformed == false) {
+                    builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
+                }
+                iterator.remove();
+            } else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lon")) {
+                if (builder.fieldType().ignoreMalformed() == false) {
+                    builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
+                }
                 iterator.remove();
-            } else if (fieldName.equals("validate_lat")) {
-                builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lat")) {
+                if (builder.fieldType().ignoreMalformed == false) {
+                    builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
+                }
                 iterator.remove();
-            } else if (fieldName.equals("normalize")) {
-                builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
-                builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (fieldName.equals(Names.COERCE)) {
+                builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
+                if (builder.fieldType().coerce == true) {
+                    builder.fieldType().ignoreMalformed = true;
+                }
                 iterator.remove();
-            } else if (fieldName.equals("normalize_lat")) {
-                builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize")) {
+                builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
                 iterator.remove();
-            } else if (fieldName.equals("normalize_lon")) {
-                builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lat")) {
+                builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
+                iterator.remove();
+            } else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lon")) {
+                if (builder.fieldType().coerce == false) {
+                    builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
+                }
                 iterator.remove();
             } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
                 iterator.remove();

@@ -281,10 +300,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper

         private MappedFieldType latFieldType;
         private MappedFieldType lonFieldType;
-        private boolean validateLon = true;
-        private boolean validateLat = true;
-        private boolean normalizeLon = true;
-        private boolean normalizeLat = true;
+        private boolean ignoreMalformed = false;
+        private boolean coerce = false;

         public GeoPointFieldType() {}

@@ -295,10 +312,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
             this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
             this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
-            this.validateLon = ref.validateLon;
-            this.validateLat = ref.validateLat;
-            this.normalizeLon = ref.normalizeLon;
-            this.normalizeLat = ref.normalizeLat;
+            this.coerce = ref.coerce;
+            this.ignoreMalformed = ref.ignoreMalformed;
         }

         @Override

@@ -312,10 +327,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             GeoPointFieldType that = (GeoPointFieldType) o;
             return geohashPrecision == that.geohashPrecision &&
                 geohashPrefixEnabled == that.geohashPrefixEnabled &&
-                validateLon == that.validateLon &&
-                validateLat == that.validateLat &&
-                normalizeLon == that.normalizeLon &&
-                normalizeLat == that.normalizeLat &&
+                coerce == that.coerce &&
+                ignoreMalformed == that.ignoreMalformed &&
                 java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
                 java.util.Objects.equals(latFieldType, that.latFieldType) &&
                 java.util.Objects.equals(lonFieldType, that.lonFieldType);

@@ -323,7 +336,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper

         @Override
         public int hashCode() {
-            return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType, lonFieldType, validateLon, validateLat, normalizeLon, normalizeLat);
+            return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType,
+                    lonFieldType, coerce, ignoreMalformed);
         }

         @Override

@@ -347,22 +361,10 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
                 conflicts.add("mapper [" + names().fullName() + "] has different geohash_prefix");
             }
-            if (normalizeLat() != other.normalizeLat()) {
-                conflicts.add("mapper [" + names().fullName() + "] has different normalize_lat");
-            }
-            if (normalizeLon() != other.normalizeLon()) {
-                conflicts.add("mapper [" + names().fullName() + "] has different normalize_lon");
-            }
-            if (isLatLonEnabled() &&
+            if (isLatLonEnabled() && other.isLatLonEnabled() &&
                 latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
                 conflicts.add("mapper [" + names().fullName() + "] has different precision_step");
             }
-            if (validateLat() != other.validateLat()) {
-                conflicts.add("mapper [" + names().fullName() + "] has different validate_lat");
-            }
-            if (validateLon() != other.validateLon()) {
-                conflicts.add("mapper [" + names().fullName() + "] has different validate_lon");
-            }
         }

         public boolean isGeohashEnabled() {

@@ -406,40 +408,22 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             this.lonFieldType = lonFieldType;
         }

-        public boolean validateLon() {
-            return validateLon;
+        public boolean coerce() {
+            return this.coerce;
         }

-        public void setValidateLon(boolean validateLon) {
+        public void setCoerce(boolean coerce) {
             checkIfFrozen();
-            this.validateLon = validateLon;
+            this.coerce = coerce;
         }

-        public boolean validateLat() {
-            return validateLat;
+        public boolean ignoreMalformed() {
+            return this.ignoreMalformed;
         }

-        public void setValidateLat(boolean validateLat) {
+        public void setIgnoreMalformed(boolean ignoreMalformed) {
             checkIfFrozen();
-            this.validateLat = validateLat;
-        }
-
-        public boolean normalizeLon() {
-            return normalizeLon;
-        }
-
-        public void setNormalizeLon(boolean normalizeLon) {
-            checkIfFrozen();
-            this.normalizeLon = normalizeLon;
-        }
-
-        public boolean normalizeLat() {
-            return normalizeLat;
-        }
-
-        public void setNormalizeLat(boolean normalizeLat) {
-            checkIfFrozen();
-            this.normalizeLat = normalizeLat;
+            this.ignoreMalformed = ignoreMalformed;
         }

         @Override

@@ -586,7 +570,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
     private final StringFieldMapper geohashMapper;

     public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
-            ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,MultiFields multiFields) {
+            ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
+            MultiFields multiFields) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
         this.pathType = pathType;
         this.latMapper = latMapper;

@@ -680,21 +665,22 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
     }

     private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
-        if (fieldType().normalizeLat() || fieldType().normalizeLon()) {
-            GeoUtils.normalizePoint(point, fieldType().normalizeLat(), fieldType().normalizeLon());
-        }
-
-        if (fieldType().validateLat()) {
+        if (fieldType().ignoreMalformed == false) {
             if (point.lat() > 90.0 || point.lat() < -90.0) {
                 throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
             }
-        }
-        if (fieldType().validateLon()) {
             if (point.lon() > 180.0 || point.lon() < -180) {
                 throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
             }
         }

+        if (fieldType().coerce) {
+            // by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
+            // thus this extra step can be skipped
+            // LUCENE WATCH: This will be folded back into Lucene's GeoPointField
+            GeoUtils.normalizePoint(point, true, true);
+        }
+
         if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
             Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
             context.doc().add(field);

@@ -755,33 +741,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
             builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
         }
-        if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT || fieldType().validateLon() != Defaults.VALIDATE_LON) {
-            if (fieldType().validateLat() && fieldType().validateLon()) {
-                builder.field("validate", true);
-            } else if (!fieldType().validateLat() && !fieldType().validateLon()) {
-                builder.field("validate", false);
-            } else {
-                if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT) {
-                    builder.field("validate_lat", fieldType().validateLat());
-                }
-                if (includeDefaults || fieldType().validateLon() != Defaults.VALIDATE_LON) {
-                    builder.field("validate_lon", fieldType().validateLon());
-                }
-            }
+        if (includeDefaults || fieldType().coerce != Defaults.COERCE) {
+            builder.field(Names.COERCE, fieldType().coerce);
         }
-        if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
-            if (fieldType().normalizeLat() && fieldType().normalizeLon()) {
-                builder.field("normalize", true);
-            } else if (!fieldType().normalizeLat() && !fieldType().normalizeLon()) {
-                builder.field("normalize", false);
-            } else {
-                if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT) {
-                    builder.field("normalize_lat", fieldType().normalizeLat());
-                }
-                if (includeDefaults || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
-                    builder.field("normalize_lon", fieldType().normalizeLon());
-                }
-            }
+        if (includeDefaults || fieldType().ignoreMalformed != Defaults.IGNORE_MALFORMED) {
+            builder.field(Names.IGNORE_MALFORMED, fieldType().ignoreMalformed);
         }
     }

@@ -812,5 +776,4 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             return new BytesRef(bytes);
         }
     }
-
 }
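The mapper hunks above replace the old validate/normalize flags with ignore_malformed and coerce: out-of-range coordinates are rejected unless ignore_malformed is set, and coerce additionally normalizes points back into range (and implies ignore_malformed). A standalone plain-Java sketch of that decision order, with a crude stand-in for GeoUtils.normalizePoint:

<pre>
// Standalone sketch of the ignore_malformed / coerce semantics, no
// Elasticsearch types: reject out-of-range coordinates unless ignoreMalformed
// is set, and pull them back into range when coerce is set.
public class GeoPointChecks {

    static double[] parse(double lat, double lon, boolean ignoreMalformed, boolean coerce) {
        if (!ignoreMalformed) {
            if (lat > 90.0 || lat < -90.0) {
                throw new IllegalArgumentException("illegal latitude value [" + lat + "]");
            }
            if (lon > 180.0 || lon < -180.0) {
                throw new IllegalArgumentException("illegal longitude value [" + lon + "]");
            }
        }
        if (coerce) {
            // crude normalization stand-in for GeoUtils.normalizePoint:
            // wrap longitude into [-180, 180] and clamp latitude to [-90, 90]
            while (lon > 180.0) lon -= 360.0;
            while (lon < -180.0) lon += 360.0;
            lat = Math.max(-90.0, Math.min(90.0, lat));
        }
        return new double[] { lat, lon };
    }

    public static void main(String[] args) {
        double[] p = parse(45.0, 200.0, true, true); // coerced to lon = -160.0
        System.out.println(p[0] + "," + p[1]);
    }
}
</pre>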
@@ -41,6 +41,8 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {

     private String queryName;
     private String type;
+    private Boolean coerce;
+    private Boolean ignoreMalformed;

     public GeoBoundingBoxQueryBuilder(String name) {
         this.name = name;

@@ -134,6 +136,16 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
         return this;
     }

+    public GeoBoundingBoxQueryBuilder coerce(boolean coerce) {
+        this.coerce = coerce;
+        return this;
+    }
+
+    public GeoBoundingBoxQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+        this.ignoreMalformed = ignoreMalformed;
+        return this;
+    }
+
     /**
      * Sets the type of executing of the geo bounding box. Can be either `memory` or `indexed`. Defaults
      * to `memory`.

@@ -169,6 +181,12 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
         if (type != null) {
             builder.field("type", type);
         }
+        if (coerce != null) {
+            builder.field("coerce", coerce);
+        }
+        if (ignoreMalformed != null) {
+            builder.field("ignore_malformed", ignoreMalformed);
+        }

         builder.endObject();
     }
@@ -21,12 +21,12 @@ package org.elasticsearch.index.query;

 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;

@@ -81,7 +81,9 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
         String queryName = null;
         String currentFieldName = null;
         XContentParser.Token token;
-        boolean normalize = true;
+        final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
+        boolean coerce = false;
+        boolean ignoreMalformed = false;

         GeoPoint sparse = new GeoPoint();

@@ -137,10 +139,15 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
             } else if (token.isValue()) {
                 if ("_name".equals(currentFieldName)) {
                     queryName = parser.text();
-                } else if ("normalize".equals(currentFieldName)) {
-                    normalize = parser.booleanValue();
+                } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
+                    coerce = parser.booleanValue();
+                    if (coerce == true) {
+                        ignoreMalformed = true;
+                    }
                 } else if ("type".equals(currentFieldName)) {
                     type = parser.text();
+                } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+                    ignoreMalformed = parser.booleanValue();
                 } else {
                     throw new QueryParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
                 }

@@ -150,8 +157,24 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
         final GeoPoint topLeft = sparse.reset(top, left);  //just keep the object
         final GeoPoint bottomRight = new GeoPoint(bottom, right);

-        if (normalize) {
-            // Special case: if the difference bettween the left and right is 360 and the right is greater than the left, we are asking for
+        // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+        if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
+            if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) {
+                throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME);
+            }
+            if (topLeft.lon() > 180.0 || topLeft.lon() < -180) {
+                throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME);
+            }
+            if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) {
+                throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME);
+            }
+            if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) {
+                throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME);
+            }
+        }
+
+        if (coerce) {
+            // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
             // the complete longitude range so need to set longitude to the complete longditude range
             boolean completeLonRange = ((right - left) % 360 == 0 && right > left);
             GeoUtils.normalizePoint(topLeft, true, !completeLonRange);
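The parser above keeps the special case where a box whose left and right longitudes differ by a whole multiple of 360 (with right greater than left) means "all longitudes", so coercion must leave the longitude bounds untouched. A small standalone check of that condition:

<pre>
// Quick standalone check of the "complete longitude range" condition used above.
public class CompleteLonRangeCheck {

    static boolean completeLonRange(double left, double right) {
        return (right - left) % 360 == 0 && right > left;
    }

    public static void main(String[] args) {
        System.out.println(completeLonRange(-180.0, 180.0)); // true
        System.out.println(completeLonRange(0.0, 360.0));    // true
        System.out.println(completeLonRange(-10.0, 10.0));   // false
    }
}
</pre>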
@@ -44,6 +44,10 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {

     private String queryName;

+    private Boolean coerce;
+
+    private Boolean ignoreMalformed;
+
     public GeoDistanceQueryBuilder(String name) {
         this.name = name;
     }

@@ -97,6 +101,16 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
         return this;
     }

+    public GeoDistanceQueryBuilder coerce(boolean coerce) {
+        this.coerce = coerce;
+        return this;
+    }
+
+    public GeoDistanceQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+        this.ignoreMalformed = ignoreMalformed;
+        return this;
+    }
+
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(GeoDistanceQueryParser.NAME);

@@ -115,6 +129,12 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
         if (queryName != null) {
             builder.field("_name", queryName);
         }
+        if (coerce != null) {
+            builder.field("coerce", coerce);
+        }
+        if (ignoreMalformed != null) {
+            builder.field("ignore_malformed", ignoreMalformed);
+        }
         builder.endObject();
     }
 }
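This builder (and the other geo query builders touched in this commit) models the two new options as Boolean fields that stay null until a caller sets them, so only explicitly requested flags are serialized into the query body. A plain-Java sketch of that pattern, rendering into a map instead of XContent (class name invented for illustration):

<pre>
import java.util.LinkedHashMap;
import java.util.Map;

// Three-state option fields: null means "not set, do not serialize".
public class OptionalFlagsBuilder {

    private Boolean coerce;
    private Boolean ignoreMalformed;

    public OptionalFlagsBuilder coerce(boolean coerce) {
        this.coerce = coerce;
        return this;
    }

    public OptionalFlagsBuilder ignoreMalformed(boolean ignoreMalformed) {
        this.ignoreMalformed = ignoreMalformed;
        return this;
    }

    public Map<String, Object> toMap() {
        Map<String, Object> body = new LinkedHashMap<>();
        if (coerce != null) {
            body.put("coerce", coerce);
        }
        if (ignoreMalformed != null) {
            body.put("ignore_malformed", ignoreMalformed);
        }
        return body;
    }

    public static void main(String[] args) {
        System.out.println(new OptionalFlagsBuilder().toMap());              // {}
        System.out.println(new OptionalFlagsBuilder().coerce(true).toMap()); // {coerce=true}
    }
}
</pre>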
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.query;

 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;

@@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;

@@ -71,8 +71,9 @@ public class GeoDistanceQueryParser implements QueryParser {
         DistanceUnit unit = DistanceUnit.DEFAULT;
         GeoDistance geoDistance = GeoDistance.DEFAULT;
         String optimizeBbox = "memory";
-        boolean normalizeLon = true;
-        boolean normalizeLat = true;
+        final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
+        boolean coerce = false;
+        boolean ignoreMalformed = false;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();

@@ -125,9 +126,13 @@ public class GeoDistanceQueryParser implements QueryParser {
                     queryName = parser.text();
                 } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
                     optimizeBbox = parser.textOrNull();
-                } else if ("normalize".equals(currentFieldName)) {
-                    normalizeLat = parser.booleanValue();
-                    normalizeLon = parser.booleanValue();
+                } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
+                    coerce = parser.booleanValue();
+                    if (coerce == true) {
+                        ignoreMalformed = true;
+                    }
+                } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+                    ignoreMalformed = parser.booleanValue();
                 } else {
                     point.resetFromString(parser.text());
                     fieldName = currentFieldName;

@@ -135,6 +140,20 @@ public class GeoDistanceQueryParser implements QueryParser {
             }
         }

+        // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+        if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
+            if (point.lat() > 90.0 || point.lat() < -90.0) {
+                throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+            }
+            if (point.lon() > 180.0 || point.lon() < -180) {
+                throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+            }
+        }
+
+        if (coerce) {
+            GeoUtils.normalizePoint(point, coerce, coerce);
+        }
+
         if (vDistance == null) {
             throw new QueryParsingException(parseContext, "geo_distance requires 'distance' to be specified");
         } else if (vDistance instanceof Number) {

@@ -144,10 +163,6 @@ public class GeoDistanceQueryParser implements QueryParser {
         }
         distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT);

-        if (normalizeLat || normalizeLon) {
-            GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
-        }
-
         MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
         if (fieldType == null) {
             throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
@@ -46,6 +46,10 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {

     private String optimizeBbox;

+    private Boolean coerce;
+
+    private Boolean ignoreMalformed;
+
     public GeoDistanceRangeQueryBuilder(String name) {
         this.name = name;
     }

@@ -125,6 +129,16 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
         return this;
     }

+    public GeoDistanceRangeQueryBuilder coerce(boolean coerce) {
+        this.coerce = coerce;
+        return this;
+    }
+
+    public GeoDistanceRangeQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+        this.ignoreMalformed = ignoreMalformed;
+        return this;
+    }
+
     /**
      * Sets the filter name for the filter that can be used when searching for matched_filters per hit.
      */

@@ -154,6 +168,12 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
         if (queryName != null) {
             builder.field("_name", queryName);
         }
+        if (coerce != null) {
+            builder.field("coerce", coerce);
+        }
+        if (ignoreMalformed != null) {
+            builder.field("ignore_malformed", ignoreMalformed);
+        }
         builder.endObject();
     }
 }
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.query;

 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;

@@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;

@@ -73,8 +73,9 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
         DistanceUnit unit = DistanceUnit.DEFAULT;
         GeoDistance geoDistance = GeoDistance.DEFAULT;
         String optimizeBbox = "memory";
-        boolean normalizeLon = true;
-        boolean normalizeLat = true;
+        final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
+        boolean coerce = false;
+        boolean ignoreMalformed = false;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();

@@ -155,9 +156,13 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
                     queryName = parser.text();
                 } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
                     optimizeBbox = parser.textOrNull();
-                } else if ("normalize".equals(currentFieldName)) {
-                    normalizeLat = parser.booleanValue();
-                    normalizeLon = parser.booleanValue();
+                } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
+                    coerce = parser.booleanValue();
+                    if (coerce == true) {
+                        ignoreMalformed = true;
+                    }
+                } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+                    ignoreMalformed = parser.booleanValue();
                 } else {
                     point.resetFromString(parser.text());
                     fieldName = currentFieldName;

@@ -165,6 +170,20 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
             }
         }

+        // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+        if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
+            if (point.lat() > 90.0 || point.lat() < -90.0) {
+                throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+            }
+            if (point.lon() > 180.0 || point.lon() < -180) {
+                throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+            }
+        }
+
+        if (coerce) {
+            GeoUtils.normalizePoint(point, coerce, coerce);
+        }
+
         Double from = null;
         Double to = null;
         if (vFrom != null) {

@@ -184,10 +203,6 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
             to = geoDistance.normalize(to, DistanceUnit.DEFAULT);
         }

-        if (normalizeLat || normalizeLon) {
-            GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
-        }
-
         MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
         if (fieldType == null) {
             throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
@@ -38,6 +38,10 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {

     private String queryName;

+    private Boolean coerce;
+
+    private Boolean ignoreMalformed;
+
     public GeoPolygonQueryBuilder(String name) {
         this.name = name;
     }

@@ -70,6 +74,16 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
         return this;
     }

+    public GeoPolygonQueryBuilder coerce(boolean coerce) {
+        this.coerce = coerce;
+        return this;
+    }
+
+    public GeoPolygonQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+        this.ignoreMalformed = ignoreMalformed;
+        return this;
+    }
+
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(GeoPolygonQueryParser.NAME);

@@ -85,6 +99,12 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
         if (queryName != null) {
             builder.field("_name", queryName);
         }
+        if (coerce != null) {
+            builder.field("coerce", coerce);
+        }
+        if (ignoreMalformed != null) {
+            builder.field("ignore_malformed", ignoreMalformed);
+        }

         builder.endObject();
     }
@@ -22,13 +22,13 @@ package org.elasticsearch.index.query;
 import com.google.common.collect.Lists;

 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoPolygonQuery;

@@ -70,9 +70,9 @@ public class GeoPolygonQueryParser implements QueryParser {

         List<GeoPoint> shell = Lists.newArrayList();

-        boolean normalizeLon = true;
-        boolean normalizeLat = true;
+        final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
+        boolean coerce = false;
+        boolean ignoreMalformed = false;
         String queryName = null;
         String currentFieldName = null;
         XContentParser.Token token;

@@ -108,9 +108,13 @@ public class GeoPolygonQueryParser implements QueryParser {
             } else if (token.isValue()) {
                 if ("_name".equals(currentFieldName)) {
                     queryName = parser.text();
-                } else if ("normalize".equals(currentFieldName)) {
-                    normalizeLat = parser.booleanValue();
-                    normalizeLon = parser.booleanValue();
+                } else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
+                    coerce = parser.booleanValue();
+                    if (coerce == true) {
+                        ignoreMalformed = true;
+                    }
+                } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+                    ignoreMalformed = parser.booleanValue();
                 } else {
                     throw new QueryParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]");
                 }

@@ -134,9 +138,21 @@ public class GeoPolygonQueryParser implements QueryParser {
             }
         }

-        if (normalizeLat || normalizeLon) {
+        // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+        if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
             for (GeoPoint point : shell) {
-                GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
+                if (point.lat() > 90.0 || point.lat() < -90.0) {
+                    throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+                }
+                if (point.lon() > 180.0 || point.lon() < -180) {
+                    throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+                }
+            }
+        }
+
+        if (coerce) {
+            for (GeoPoint point : shell) {
+                GeoUtils.normalizePoint(point, coerce, coerce);
             }
         }

@@ -258,6 +258,7 @@ public class HasChildQueryParser implements QueryParser {
             String joinField = ParentFieldMapper.joinField(parentType);
             IndexReader indexReader = searchContext.searcher().getIndexReader();
             IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+            indexSearcher.setQueryCache(null);
             IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
             MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
             return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
@@ -68,10 +68,6 @@ public class NotQueryParser implements QueryParser {
                     // its the filter, and the name is the field
                     query = parseContext.parseInnerFilter(currentFieldName);
                 }
-            } else if (token == XContentParser.Token.START_ARRAY) {
-                queryFound = true;
-                // its the filter, and the name is the field
-                query = parseContext.parseInnerFilter(currentFieldName);
             } else if (token.isValue()) {
                 if ("_name".equals(currentFieldName)) {
                     queryName = parser.text();
@ -38,8 +38,6 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui
|
||||||
|
|
||||||
private String queryName;
|
private String queryName;
|
||||||
|
|
||||||
private String execution;
|
|
||||||
|
|
||||||
private float boost = -1;
|
private float boost = -1;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -118,17 +116,6 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui
this.values = values;
}

/**
* Sets the execution mode for the terms filter. Cane be either "plain", "bool"
* "and". Defaults to "plain".
* @deprecated elasticsearch now makes better decisions on its own
*/
@Deprecated
public TermsQueryBuilder execution(String execution) {
this.execution = execution;
return this;
}

/**
* Sets the minimum number of matches across the provided terms. Defaults to <tt>1</tt>.
* @deprecated use [bool] query instead
@@ -168,10 +155,6 @@ public class TermsQueryBuilder extends QueryBuilder implements BoostableQueryBui
builder.startObject(TermsQueryParser.NAME);
builder.field(name, values);

if (execution != null) {
builder.field("execution", execution);
}

if (minimumShouldMatch != null) {
builder.field("minimum_should_match", minimumShouldMatch);
}
@@ -52,11 +52,9 @@ public class TermsQueryParser implements QueryParser {
public static final String NAME = "terms";
private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match").withAllDeprecated("Use [bool] query instead");
private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead");
private static final ParseField EXECUTION_FIELD = new ParseField("execution").withAllDeprecated("execution is deprecated and has no effect");
private Client client;

@Deprecated
public static final String EXECUTION_KEY = "execution";

@Inject
public TermsQueryParser() {
}
@@ -141,7 +139,7 @@ public class TermsQueryParser implements QueryParser {
throw new QueryParsingException(parseContext, "[terms] query lookup element requires specifying the path");
}
} else if (token.isValue()) {
if (EXECUTION_KEY.equals(currentFieldName)) {
if (parseContext.parseFieldMatcher().match(currentFieldName, EXECUTION_FIELD)) {
// ignore
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SHOULD_MATCH_FIELD)) {
if (minShouldMatch != null) {
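The practical effect of the terms-query changes above is that the legacy execution hint is now parsed only to emit a deprecation warning and is otherwise ignored, and the builder-side setter is gone. A minimal sketch of the replacement usage with the Java client API; the field and values below are made up for illustration:

<pre>
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermsQueryBuilder;

public class TermsQueryExample {
    public static void main(String[] args) {
        // A plain terms query; the engine now picks the execution strategy itself,
        // so there is no execution("plain"/"bool"/"and") hint to set any more.
        TermsQueryBuilder terms = QueryBuilders.termsQuery("user", "alice", "bob");

        // Where minimum_should_match semantics are needed, the deprecation message
        // points to a bool query with individual term clauses instead.
        BoolQueryBuilder bool = QueryBuilders.boolQuery()
                .should(QueryBuilders.termQuery("user", "alice"))
                .should(QueryBuilders.termQuery("user", "bob"))
                .minimumNumberShouldMatch(1);

        System.out.println(terms);
        System.out.println(bool);
    }
}
</pre>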
@@ -19,15 +19,17 @@

package org.elasticsearch.indices;

import com.google.common.collect.ImmutableList;
import org.apache.lucene.analysis.hunspell.Dictionary;

import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.SpawnModules;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser;
import org.elasticsearch.indices.analysis.HunspellService;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.cache.request.IndicesRequestCache;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
@@ -35,7 +37,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener;
import org.elasticsearch.indices.flush.SyncedFlushService;
import org.elasticsearch.indices.memory.IndexingMemoryController;
import org.elasticsearch.indices.query.IndicesQueriesModule;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.recovery.RecoverySource;
import org.elasticsearch.indices.recovery.RecoveryTarget;
@@ -44,27 +46,95 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import org.elasticsearch.indices.ttl.IndicesTTLService;

/**
*
* Configures classes and services that are shared by indices on each node.
*/
public class IndicesModule extends AbstractModule implements SpawnModules {
public class IndicesModule extends AbstractModule {

private final Settings settings;

private final ExtensionPoint.ClassSet<QueryParser> queryParsers
= new ExtensionPoint.ClassSet<>("query_parser", QueryParser.class);
private final ExtensionPoint.InstanceMap<String, Dictionary> hunspellDictionaries
= new ExtensionPoint.InstanceMap<>("hunspell_dictionary", String.class, Dictionary.class);

public IndicesModule(Settings settings) {
this.settings = settings;
registerBuiltinQueryParsers();
}

private void registerBuiltinQueryParsers() {
registerQueryParser(MatchQueryParser.class);
registerQueryParser(MultiMatchQueryParser.class);
registerQueryParser(NestedQueryParser.class);
registerQueryParser(HasChildQueryParser.class);
registerQueryParser(HasParentQueryParser.class);
registerQueryParser(DisMaxQueryParser.class);
registerQueryParser(IdsQueryParser.class);
registerQueryParser(MatchAllQueryParser.class);
registerQueryParser(QueryStringQueryParser.class);
registerQueryParser(BoostingQueryParser.class);
registerQueryParser(BoolQueryParser.class);
registerQueryParser(TermQueryParser.class);
registerQueryParser(TermsQueryParser.class);
registerQueryParser(FuzzyQueryParser.class);
registerQueryParser(RegexpQueryParser.class);
registerQueryParser(RangeQueryParser.class);
registerQueryParser(PrefixQueryParser.class);
registerQueryParser(WildcardQueryParser.class);
registerQueryParser(FilteredQueryParser.class);
registerQueryParser(ConstantScoreQueryParser.class);
registerQueryParser(SpanTermQueryParser.class);
registerQueryParser(SpanNotQueryParser.class);
registerQueryParser(SpanWithinQueryParser.class);
registerQueryParser(SpanContainingQueryParser.class);
registerQueryParser(FieldMaskingSpanQueryParser.class);
registerQueryParser(SpanFirstQueryParser.class);
registerQueryParser(SpanNearQueryParser.class);
registerQueryParser(SpanOrQueryParser.class);
registerQueryParser(MoreLikeThisQueryParser.class);
registerQueryParser(WrapperQueryParser.class);
registerQueryParser(IndicesQueryParser.class);
registerQueryParser(CommonTermsQueryParser.class);
registerQueryParser(SpanMultiTermQueryParser.class);
registerQueryParser(FunctionScoreQueryParser.class);
registerQueryParser(SimpleQueryStringParser.class);
registerQueryParser(TemplateQueryParser.class);
registerQueryParser(TypeQueryParser.class);
registerQueryParser(LimitQueryParser.class);
registerQueryParser(ScriptQueryParser.class);
registerQueryParser(GeoDistanceQueryParser.class);
registerQueryParser(GeoDistanceRangeQueryParser.class);
registerQueryParser(GeoBoundingBoxQueryParser.class);
registerQueryParser(GeohashCellQuery.Parser.class);
registerQueryParser(GeoPolygonQueryParser.class);
registerQueryParser(QueryFilterParser.class);
registerQueryParser(FQueryFilterParser.class);
registerQueryParser(AndQueryParser.class);
registerQueryParser(OrQueryParser.class);
registerQueryParser(NotQueryParser.class);
registerQueryParser(ExistsQueryParser.class);
registerQueryParser(MissingQueryParser.class);

if (ShapesAvailability.JTS_AVAILABLE) {
registerQueryParser(GeoShapeQueryParser.class);
}
}

@Override
public Iterable<? extends Module> spawnModules() {
return ImmutableList.of(new IndicesQueriesModule(), new IndicesAnalysisModule());
public void registerQueryParser(Class<? extends QueryParser> queryParser) {
queryParsers.registerExtension(queryParser);
}

public void registerHunspellDictionary(String name, Dictionary dictionary) {
hunspellDictionaries.registerExtension(name, dictionary);
}

@Override
protected void configure() {
bindQueryParsersExtension();
bindHunspellExtension();

bind(IndicesLifecycle.class).to(InternalIndicesLifecycle.class).asEagerSingleton();

bind(IndicesService.class).asEagerSingleton();

bind(RecoverySettings.class).asEagerSingleton();
bind(RecoveryTarget.class).asEagerSingleton();
bind(RecoverySource.class).asEagerSingleton();
@@ -80,7 +150,17 @@ public class IndicesModule extends AbstractModule implements SpawnModules {
bind(IndicesWarmer.class).asEagerSingleton();
bind(UpdateHelper.class).asEagerSingleton();
bind(MetaDataIndexUpgradeService.class).asEagerSingleton();

bind(IndicesFieldDataCacheListener.class).asEagerSingleton();
}

protected void bindQueryParsersExtension() {
queryParsers.bind(binder());
bind(IndicesQueriesRegistry.class).asEagerSingleton();
}

protected void bindHunspellExtension() {
hunspellDictionaries.bind(binder());
bind(HunspellService.class).asEagerSingleton();
bind(IndicesAnalysisService.class).asEagerSingleton();
}
}
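With IndicesModule exposing extension points directly, query parsers and Hunspell dictionaries are registered on the module itself instead of through the removed IndicesQueriesModule and IndicesAnalysisModule (both deleted below). A minimal sketch of how a plugin could hook in, assuming the usual 2.x onModule plugin callback; the plugin name, the commented-out MyQueryParser, the "xx_XX" locale key, and loadMyDictionary are all hypothetical placeholders:

<pre>
import org.apache.lucene.analysis.hunspell.Dictionary;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.plugins.Plugin;

public class MyPlugin extends Plugin {

    @Override
    public String name() {
        return "my-plugin";
    }

    @Override
    public String description() {
        return "registers a hunspell dictionary (and could register a query parser)";
    }

    // Invoked by the module system while the node wires its modules.
    public void onModule(IndicesModule indicesModule) {
        // A custom query parser would be registered the same way:
        // indicesModule.registerQueryParser(MyQueryParser.class); // hypothetical class

        // Register a pre-built Lucene hunspell Dictionary under a locale key.
        Dictionary dictionary = loadMyDictionary();
        indicesModule.registerHunspellDictionary("xx_XX", dictionary);
    }

    private Dictionary loadMyDictionary() {
        // Placeholder: a real plugin would build the Dictionary from .aff/.dic resources.
        throw new UnsupportedOperationException("load .aff/.dic resources here");
    }
}
</pre>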
@@ -1,47 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.indices.analysis;

import com.google.common.collect.Maps;
import org.apache.lucene.analysis.hunspell.Dictionary;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.MapBinder;

import java.util.Map;

public class IndicesAnalysisModule extends AbstractModule {

private final Map<String, Dictionary> hunspellDictionaries = Maps.newHashMap();

public void addHunspellDictionary(String lang, Dictionary dictionary) {
hunspellDictionaries.put(lang, dictionary);
}

@Override
protected void configure() {
bind(IndicesAnalysisService.class).asEagerSingleton();

MapBinder<String, Dictionary> dictionariesBinder = MapBinder.newMapBinder(binder(), String.class, Dictionary.class);
for (Map.Entry<String, Dictionary> entry : hunspellDictionaries.entrySet()) {
dictionariesBinder.addBinding(entry.getKey()).toInstance(entry.getValue());
}
bind(HunspellService.class).asEagerSingleton();
}
}
@@ -1,105 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.indices.query;

import com.google.common.collect.Sets;
import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser;

import java.util.Set;

public class IndicesQueriesModule extends AbstractModule {

private Set<Class<? extends QueryParser>> queryParsersClasses = Sets.newHashSet();

public synchronized IndicesQueriesModule addQuery(Class<? extends QueryParser> queryParser) {
queryParsersClasses.add(queryParser);
return this;
}

@Override
protected void configure() {
bind(IndicesQueriesRegistry.class).asEagerSingleton();

Multibinder<QueryParser> qpBinders = Multibinder.newSetBinder(binder(), QueryParser.class);
for (Class<? extends QueryParser> queryParser : queryParsersClasses) {
qpBinders.addBinding().to(queryParser).asEagerSingleton();
}
qpBinders.addBinding().to(MatchQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(MultiMatchQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(NestedQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(HasChildQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(HasParentQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(DisMaxQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(IdsQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(MatchAllQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(QueryStringQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(BoostingQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(BoolQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(TermQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(TermsQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(FuzzyQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(RegexpQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(RangeQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(PrefixQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(WildcardQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(FilteredQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(ConstantScoreQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanTermQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanNotQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanWithinQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanContainingQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(FieldMaskingSpanQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanFirstQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanNearQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanOrQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(MoreLikeThisQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(WrapperQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(IndicesQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(CommonTermsQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SpanMultiTermQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(FunctionScoreQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(SimpleQueryStringParser.class).asEagerSingleton();
qpBinders.addBinding().to(TemplateQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(TypeQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(LimitQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(TermsQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(ScriptQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(GeoDistanceQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(GeoDistanceRangeQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(GeoBoundingBoxQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(GeohashCellQuery.Parser.class).asEagerSingleton();
qpBinders.addBinding().to(GeoPolygonQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(QueryFilterParser.class).asEagerSingleton();
qpBinders.addBinding().to(FQueryFilterParser.class).asEagerSingleton();
qpBinders.addBinding().to(AndQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(OrQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(NotQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(ExistsQueryParser.class).asEagerSingleton();
qpBinders.addBinding().to(MissingQueryParser.class).asEagerSingleton();

if (ShapesAvailability.JTS_AVAILABLE) {
qpBinders.addBinding().to(GeoShapeQueryParser.class).asEagerSingleton();
}
}
}
@@ -293,7 +293,7 @@ public class RecoverySourceHandler {
store.incRef();
final StoreFileMetaData md = recoverySourceMetadata.get(name);
try (final IndexInput indexInput = store.directory().openInput(name, IOContext.READONCE)) {
final int BUFFER_SIZE = (int) recoverySettings.fileChunkSize().bytes();
final int BUFFER_SIZE = (int) Math.max(1, recoverySettings.fileChunkSize().bytes()); // at least one!
final byte[] buf = new byte[BUFFER_SIZE];
boolean shouldCompressRequest = recoverySettings.compress();
if (CompressorFactory.isCompressed(indexInput)) {
@@ -226,6 +226,9 @@ public class RecoveryStatus extends AbstractRefCounted {
public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetaData metaData, Store store) throws IOException {
ensureRefCount();
String tempFileName = getTempNameForFile(fileName);
if (tempFileNames.containsKey(tempFileName)) {
throw new IllegalStateException("output for file [" + fileName + "] has already been created");
}
// add first, before it's created
tempFileNames.put(tempFileName, fileName);
IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT);
@@ -119,7 +119,7 @@ public class NodeService extends AbstractComponent {
}

public NodeInfo info(boolean settings, boolean os, boolean process, boolean jvm, boolean threadPool,
boolean network, boolean transport, boolean http, boolean plugin) {
boolean transport, boolean http, boolean plugin) {
return new NodeInfo(version, Build.CURRENT, discovery.localNode(), serviceAttributes,
settings ? this.settings : null,
os ? monitorService.osService().info() : null,

@@ -149,7 +149,7 @@ public class NodeService extends AbstractComponent {
);
}

public NodeStats stats(CommonStatsFlags indices, boolean os, boolean process, boolean jvm, boolean threadPool, boolean network,
public NodeStats stats(CommonStatsFlags indices, boolean os, boolean process, boolean jvm, boolean threadPool,
boolean fs, boolean transport, boolean http, boolean circuitBreaker,
boolean script) {
// for indices stats we want to include previous allocated shards stats as well (it will
@@ -75,18 +75,19 @@ public class PluginManager {

static final ImmutableSet<String> OFFICIAL_PLUGINS = ImmutableSet.<String>builder()
.add(
"elasticsearch-analysis-icu",
"analysis-icu",
"elasticsearch-analysis-kuromoji",
"analysis-kuromoji",
"elasticsearch-analysis-phonetic",
"analysis-phonetic",
"elasticsearch-analysis-smartcn",
"analysis-smartcn",
"elasticsearch-analysis-stempel",
"analysis-stempel",
"elasticsearch-cloud-aws",
"cloud-aws",
"elasticsearch-cloud-azure",
"cloud-azure",
"elasticsearch-cloud-gce",
"cloud-gce",
"elasticsearch-delete-by-query",
"delete-by-query",
"elasticsearch-lang-javascript",
"lang-javascript",
"elasticsearch-lang-python",
"lang-python",
"elasticsearch-mapper-size"
"mapper-murmur3",
"mapper-size"
).build();

private final Environment environment;
@@ -162,7 +163,7 @@ public class PluginManager {
terminal.println("Failed: %s", ExceptionsHelper.detailedMessage(e));
}
} else {
if (PluginHandle.isOfficialPlugin(pluginHandle.repo, pluginHandle.user, pluginHandle.version)) {
if (PluginHandle.isOfficialPlugin(pluginHandle.name, pluginHandle.user, pluginHandle.version)) {
checkForOfficialPlugins(pluginHandle.name);
}
}
@@ -437,43 +438,41 @@ public class PluginManager {
*/
static class PluginHandle {

final String name;
final String version;
final String user;
final String repo;
final String name;

PluginHandle(String name, String version, String user, String repo) {
PluginHandle(String name, String version, String user) {
this.name = name;
this.version = version;
this.user = user;
this.repo = repo;
this.name = name;
}

List<URL> urls() {
List<URL> urls = new ArrayList<>();
if (version != null) {
// Elasticsearch new download service uses groupId org.elasticsearch.plugins from 2.0.0
// Elasticsearch new download service uses groupId org.elasticsearch.plugin from 2.0.0
if (user == null) {
// TODO Update to https
if (!Strings.isNullOrEmpty(System.getProperty(PROPERTY_SUPPORT_STAGING_URLS))) {
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", Build.CURRENT.hashShort(), repo, version, repo, version));
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip", version, Build.CURRENT.hashShort(), name, version, name, version));
}
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip", repo, version, repo, version));
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip", name, version, name, version));
} else {
// Elasticsearch old download service
// TODO Update to https
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, repo, version));
addUrl(urls, String.format(Locale.ROOT, "http://download.elastic.co/%1$s/%2$s/%2$s-%3$s.zip", user, name, version));
// Maven central repository
addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version));
addUrl(urls, String.format(Locale.ROOT, "http://search.maven.org/remotecontent?filepath=%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
// Sonatype repository
addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), repo, version));
addUrl(urls, String.format(Locale.ROOT, "https://oss.sonatype.org/service/local/repositories/releases/content/%1$s/%2$s/%3$s/%2$s-%3$s.zip", user.replace('.', '/'), name, version));
// Github repository
addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, repo, version));
addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/%3$s.zip", user, name, version));
}
}
if (user != null) {
// Github repository for master branch (assume site)
addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, repo));
addUrl(urls, String.format(Locale.ROOT, "https://github.com/%1$s/%2$s/archive/master.zip", user, name));
}
return urls;
}
@@ -525,20 +524,11 @@ public class PluginManager {
}
}

String endname = repo;
if (repo.startsWith("elasticsearch-")) {
// remove elasticsearch- prefix
endname = repo.substring("elasticsearch-".length());
} else if (repo.startsWith("es-")) {
// remove es- prefix
endname = repo.substring("es-".length());
}

if (isOfficialPlugin(repo, user, version)) {
return new PluginHandle(endname, Version.CURRENT.number(), null, repo);
return new PluginHandle(repo, Version.CURRENT.number(), null);
}

return new PluginHandle(endname, version, user, repo);
return new PluginHandle(repo, version, user);
}

static boolean isOfficialPlugin(String repo, String user, String version) {
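To make the renamed plugin coordinates concrete, the release URL the new PluginHandle builds for an official plugin can be reproduced with the same format string used above; the plugin name and version below are only an example:

<pre>
import java.util.Locale;

public class PluginUrlExample {
    public static void main(String[] args) {
        // Example official plugin coordinates; official plugins drop the
        // "elasticsearch-" prefix, so the handle name is e.g. "analysis-icu".
        String name = "analysis-icu";
        String version = "2.0.0";

        // Same pattern as PluginHandle.urls() uses for the release repository.
        String url = String.format(Locale.ROOT,
                "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
                name, version, name, version);

        // -> .../org/elasticsearch/plugin/analysis-icu/2.0.0/analysis-icu-2.0.0.zip
        System.out.println(url);
    }
}
</pre>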
@@ -42,7 +42,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestNodesInfoAction extends BaseRestHandler {

private final SettingsFilter settingsFilter;
private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "network", "os", "plugins", "process", "settings", "thread_pool", "transport");
private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "os", "plugins", "process", "settings", "thread_pool", "transport");

@Inject
public RestNodesInfoAction(Settings settings, RestController controller, Client client, SettingsFilter settingsFilter) {

@@ -91,7 +91,6 @@ public class RestNodesInfoAction extends BaseRestHandler {
nodesInfoRequest.process(metrics.contains("process"));
nodesInfoRequest.jvm(metrics.contains("jvm"));
nodesInfoRequest.threadPool(metrics.contains("thread_pool"));
nodesInfoRequest.network(metrics.contains("network"));
nodesInfoRequest.transport(metrics.contains("transport"));
nodesInfoRequest.http(metrics.contains("http"));
nodesInfoRequest.plugins(metrics.contains("plugins"));

@@ -69,7 +69,6 @@ public class RestNodesStatsAction extends BaseRestHandler {
nodesStatsRequest.os(metrics.contains("os"));
nodesStatsRequest.jvm(metrics.contains("jvm"));
nodesStatsRequest.threadPool(metrics.contains("thread_pool"));
nodesStatsRequest.network(metrics.contains("network"));
nodesStatsRequest.fs(metrics.contains("fs"));
nodesStatsRequest.transport(metrics.contains("transport"));
nodesStatsRequest.http(metrics.contains("http"));
@@ -24,6 +24,7 @@ import com.google.common.collect.Multimap;

import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -170,8 +171,25 @@ public abstract class InternalTerms<A extends InternalTerms, B extends InternalT
Multimap<Object, InternalTerms.Bucket> buckets = ArrayListMultimap.create();
long sumDocCountError = 0;
long otherDocCount = 0;
InternalTerms<A, B> referenceTerms = null;
for (InternalAggregation aggregation : aggregations) {
InternalTerms<A, B> terms = (InternalTerms<A, B>) aggregation;
if (referenceTerms == null && !terms.getClass().equals(UnmappedTerms.class)) {
referenceTerms = (InternalTerms<A, B>) aggregation;
}
if (referenceTerms != null &&
!referenceTerms.getClass().equals(terms.getClass()) &&
!terms.getClass().equals(UnmappedTerms.class)) {
// control gets into this loop when the same field name against which the query is executed
// is of different types in different indices.
throw new AggregationExecutionException("Merging/Reducing the aggregations failed " +
"when computing the aggregation [ Name: " +
referenceTerms.getName() + ", Type: " +
referenceTerms.type() + " ]" + " because: " +
"the field you gave in the aggregation query " +
"existed as two different types " +
"in two different indices");
}
otherDocCount += terms.getSumOfOtherDocCounts();
final long thisAggDocCountError;
if (terms.buckets.size() < this.shardSize || this.order == InternalOrder.TERM_ASC || this.order == InternalOrder.TERM_DESC) {
@@ -56,7 +56,6 @@ import java.util.Map;
public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue {

private final int precision;
private final boolean rehash;
private final ValuesSource valuesSource;

// Expensive to initialize, so we only initialize it when we have an actual value source

@@ -66,11 +65,10 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
private Collector collector;
private ValueFormatter formatter;

public CardinalityAggregator(String name, ValuesSource valuesSource, boolean rehash, int precision, ValueFormatter formatter,
public CardinalityAggregator(String name, ValuesSource valuesSource, int precision, ValueFormatter formatter,
AggregationContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
this.rehash = rehash;
this.precision = precision;
this.counts = valuesSource == null ? null : new HyperLogLogPlusPlus(precision, context.bigArrays(), 1);
this.formatter = formatter;

@@ -85,13 +83,6 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
if (valuesSource == null) {
return new EmptyCollector();
}
// if rehash is false then the value source is either already hashed, or the user explicitly
// requested not to hash the values (perhaps they already hashed the values themselves before indexing the doc)
// so we can just work with the original value source as is
if (!rehash) {
MurmurHash3Values hashValues = MurmurHash3Values.cast(((ValuesSource.Numeric) valuesSource).longValues(ctx));
return new DirectCollector(counts, hashValues);
}

if (valuesSource instanceof ValuesSource.Numeric) {
ValuesSource.Numeric source = (ValuesSource.Numeric) valuesSource;
@@ -19,7 +19,6 @@

package org.elasticsearch.search.aggregations.metrics.cardinality;

import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

@@ -35,12 +34,10 @@ import java.util.Map;
final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource> {

private final long precisionThreshold;
private final boolean rehash;

CardinalityAggregatorFactory(String name, ValuesSourceConfig config, long precisionThreshold, boolean rehash) {
CardinalityAggregatorFactory(String name, ValuesSourceConfig config, long precisionThreshold) {
super(name, InternalCardinality.TYPE.name(), config);
this.precisionThreshold = precisionThreshold;
this.rehash = rehash;
}

private int precision(Aggregator parent) {

@@ -50,16 +47,13 @@ final class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory<V
@Override
protected Aggregator createUnmapped(AggregationContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
return new CardinalityAggregator(name, null, true, precision(parent), config.formatter(), context, parent, pipelineAggregators, metaData);
return new CardinalityAggregator(name, null, precision(parent), config.formatter(), context, parent, pipelineAggregators, metaData);
}

@Override
protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext context, Aggregator parent,
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
if (!(valuesSource instanceof ValuesSource.Numeric) && !rehash) {
throw new AggregationExecutionException("Turning off rehashing for cardinality aggregation [" + name + "] on non-numeric values in not allowed");
}
return new CardinalityAggregator(name, valuesSource, rehash, precision(parent), config.formatter(), context, parent, pipelineAggregators,
return new CardinalityAggregator(name, valuesSource, precision(parent), config.formatter(), context, parent, pipelineAggregators,
metaData);
}
@@ -21,11 +21,9 @@ package org.elasticsearch.search.aggregations.metrics.cardinality;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.core.Murmur3FieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceParser;
import org.elasticsearch.search.internal.SearchContext;

@@ -35,6 +33,7 @@ import java.io.IOException;
public class CardinalityParser implements Aggregator.Parser {

private static final ParseField PRECISION_THRESHOLD = new ParseField("precision_threshold");
private static final ParseField REHASH = new ParseField("rehash").withAllDeprecated("no replacement - values will always be rehashed");

@Override
public String type() {

@@ -44,10 +43,9 @@ public class CardinalityParser implements Aggregator.Parser {
@Override
public AggregatorFactory parse(String name, XContentParser parser, SearchContext context) throws IOException {

ValuesSourceParser vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build();
ValuesSourceParser<?> vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build();

long precisionThreshold = -1;
Boolean rehash = null;

XContentParser.Token token;
String currentFieldName = null;

@@ -57,8 +55,8 @@ public class CardinalityParser implements Aggregator.Parser {
} else if (vsParser.token(currentFieldName, token, parser)) {
continue;
} else if (token.isValue()) {
if ("rehash".equals(currentFieldName)) {
rehash = parser.booleanValue();
if (context.parseFieldMatcher().match(currentFieldName, REHASH)) {
// ignore
} else if (context.parseFieldMatcher().match(currentFieldName, PRECISION_THRESHOLD)) {
precisionThreshold = parser.longValue();
} else {

@@ -70,15 +68,7 @@ public class CardinalityParser implements Aggregator.Parser {
}
}

ValuesSourceConfig<?> config = vsParser.config();

if (rehash == null && config.fieldContext() != null && config.fieldContext().fieldType() instanceof Murmur3FieldMapper.Murmur3FieldType) {
rehash = false;
} else if (rehash == null) {
rehash = true;
}

return new CardinalityAggregatorFactory(name, config, precisionThreshold, rehash);
return new CardinalityAggregatorFactory(name, vsParser.config(), precisionThreshold);

}
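Since the parser now ignores rehash and the factory no longer carries the flag, a cardinality aggregation is simply pointed at a field; a murmur3-mapped sub-field is still used as-is where it exists. A minimal request built with the Java client API, with index and field names chosen only for illustration:

<pre>
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;

public class CardinalityExample {
    // Builds a search with a cardinality aggregation; "authors" and "author.hash"
    // are hypothetical index and murmur3-mapped field names.
    public static SearchRequestBuilder uniqueAuthors(Client client) {
        return client.prepareSearch("authors")
                .setSize(0)
                .addAggregation(AggregationBuilders.cardinality("unique_authors")
                        .field("author.hash")
                        .precisionThreshold(1000));
        // Note: there is no rehash(...) option to set; sending "rehash" in the JSON
        // body only triggers a deprecation warning and is otherwise ignored.
    }
}
</pre>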
@@ -156,6 +156,12 @@ public class AggregationContext {
}

private ValuesSource.Numeric numericField(ValuesSourceConfig<?> config) throws IOException {

if (!(config.fieldContext.indexFieldData() instanceof IndexNumericFieldData)) {
throw new IllegalArgumentException("Expected numeric type on field [" + config.fieldContext.field() +
"], but got [" + config.fieldContext.fieldType().typeName() + "]");
}

ValuesSource.Numeric dataSource = new ValuesSource.Numeric.FieldData((IndexNumericFieldData) config.fieldContext.indexFieldData());
if (config.script != null) {
dataSource = new ValuesSource.Numeric.WithScript(dataSource, config.script);

@@ -184,6 +190,12 @@ public class AggregationContext {
}

private ValuesSource.GeoPoint geoPointField(ValuesSourceConfig<?> config) throws IOException {

if (!(config.fieldContext.indexFieldData() instanceof IndexGeoPointFieldData)) {
throw new IllegalArgumentException("Expected geo_point type on field [" + config.fieldContext.field() +
"], but got [" + config.fieldContext.fieldType().typeName() + "]");
}

return new ValuesSource.GeoPoint.Fielddata((IndexGeoPointFieldData) config.fieldContext.indexFieldData());
}
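These two guards change what a caller sees when an aggregation that needs numeric or geo_point values is pointed at a field of the wrong type: instead of a low-level class-cast failure, the search fails with a message naming the field and its actual type. A sketch of a request that would now trip the numeric guard, assuming a hypothetical index whose "title" field is mapped as a string:

<pre>
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;

public class WrongFieldTypeExample {
    public static SearchRequestBuilder brokenAverage(Client client) {
        // "books" and "title" are hypothetical; because "title" is a string field,
        // executing this search now fails with a message along the lines of
        // "Expected numeric type on field [title], but got [string]".
        return client.prepareSearch("books")
                .setSize(0)
                .addAggregation(AggregationBuilders.avg("avg_title").field("title"));
    }
}
</pre>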
@@ -29,7 +29,7 @@ import java.util.*;
/**
* An extensions point and registry for all the highlighters a node supports.
*/
public class Highlighters extends ExtensionPoint.MapExtensionPoint<Highlighter> {
public class Highlighters extends ExtensionPoint.ClassMap<Highlighter> {

@Deprecated // remove in 3.0
private static final String FAST_VECTOR_HIGHLIGHTER = "fast-vector-highlighter";
@@ -47,6 +47,8 @@ public class GeoDistanceSortBuilder extends SortBuilder {
private String sortMode;
private QueryBuilder nestedFilter;
private String nestedPath;
private Boolean coerce;
private Boolean ignoreMalformed;

/**
* Constructs a new distance based sort on a geo point like field.

@@ -146,6 +148,16 @@ public class GeoDistanceSortBuilder extends SortBuilder {
return this;
}

public GeoDistanceSortBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}

public GeoDistanceSortBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("_geo_distance");

@@ -181,6 +193,12 @@ public class GeoDistanceSortBuilder extends SortBuilder {
if (nestedFilter != null) {
builder.field("nested_filter", nestedFilter, params);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}

builder.endObject();
return builder;
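The new builder options mirror the coerce / ignore_malformed keys that the sort parser accepts below. A small usage sketch with the Java API; the field name and coordinates are only examples:

<pre>
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

public class GeoDistanceSortExample {
    public static GeoDistanceSortBuilder nearestFirst() {
        // Sort by distance from a point; "location" is a hypothetical geo_point field.
        return SortBuilders.geoDistanceSort("location")
                .point(40.7143528, -74.0059731)
                .unit(DistanceUnit.KILOMETERS)
                .order(SortOrder.ASC)
                // coerce(true) normalizes out-of-range coordinates (and implies
                // accepting malformed points); with both options left unset,
                // 2.0-created indices reject illegal latitude/longitude values.
                .coerce(true);
    }
}
</pre>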
@@ -29,6 +29,7 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
import org.elasticsearch.common.geo.GeoPoint;

@@ -42,7 +43,6 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
@@ -73,8 +73,9 @@ public class GeoDistanceSortParser implements SortParser {
MultiValueMode sortMode = null;
NestedInnerQueryParseSupport nestedHelper = null;

boolean normalizeLon = true;
boolean normalizeLat = true;
final boolean indexCreatedBeforeV2_0 = context.queryParserService().getIndexCreatedVersion().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;

XContentParser.Token token;
String currentName = parser.currentName();

@@ -107,9 +108,13 @@ public class GeoDistanceSortParser implements SortParser {
unit = DistanceUnit.fromString(parser.text());
} else if (currentName.equals("distance_type") || currentName.equals("distanceType")) {
geoDistance = GeoDistance.fromString(parser.text());
} else if ("normalize".equals(currentName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else if ("coerce".equals(currentName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentName) && coerce == false) {
ignoreMalformed = parser.booleanValue();
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
sortMode = MultiValueMode.fromString(parser.text());
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {

@@ -126,9 +131,21 @@ public class GeoDistanceSortParser implements SortParser {
}
}

if (normalizeLat || normalizeLon) {
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
for (GeoPoint point : geoPoints) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new ElasticsearchParseException("illegal latitude value [{}] for [GeoDistanceSort]", point.lat());
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new ElasticsearchParseException("illegal longitude value [{}] for [GeoDistanceSort]", point.lon());
}
}
}

if (coerce) {
for (GeoPoint point : geoPoints) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
}
@ -18,7 +18,6 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.search.suggest;
|
package org.elasticsearch.search.suggest;
|
||||||
|
|
||||||
import org.elasticsearch.common.inject.Binder;
|
|
||||||
import org.elasticsearch.common.inject.Inject;
|
import org.elasticsearch.common.inject.Inject;
|
||||||
import org.elasticsearch.common.util.ExtensionPoint;
|
import org.elasticsearch.common.util.ExtensionPoint;
|
||||||
import org.elasticsearch.script.ScriptService;
|
import org.elasticsearch.script.ScriptService;
|
||||||
|
@ -31,7 +30,7 @@ import java.util.*;
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
public final class Suggesters extends ExtensionPoint.MapExtensionPoint<Suggester> {
|
public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
|
||||||
private final Map<String, Suggester> parsers;
|
private final Map<String, Suggester> parsers;
|
||||||
|
|
||||||
public Suggesters() {
|
public Suggesters() {
|
||||||
|
|
|
@ -146,8 +146,8 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
|
||||||
// node id to actual channel
|
// node id to actual channel
|
||||||
protected final ConcurrentMap<DiscoveryNode, NodeChannels> connectedNodes = newConcurrentMap();
|
protected final ConcurrentMap<DiscoveryNode, NodeChannels> connectedNodes = newConcurrentMap();
|
||||||
protected final Map<String, ServerBootstrap> serverBootstraps = newConcurrentMap();
|
protected final Map<String, ServerBootstrap> serverBootstraps = newConcurrentMap();
|
||||||
protected final Map<String, Channel> serverChannels = newConcurrentMap();
|
protected final Map<String, List<Channel>> serverChannels = newConcurrentMap();
|
||||||
protected final Map<String, BoundTransportAddress> profileBoundAddresses = newConcurrentMap();
|
protected final ConcurrentMap<String, BoundTransportAddress> profileBoundAddresses = newConcurrentMap();
|
||||||
protected volatile TransportServiceAdapter transportServiceAdapter;
|
protected volatile TransportServiceAdapter transportServiceAdapter;
|
||||||
protected volatile BoundTransportAddress boundAddress;
|
protected volatile BoundTransportAddress boundAddress;
|
||||||
protected final KeyedLock<String> connectionLock = new KeyedLock<>();
|
protected final KeyedLock<String> connectionLock = new KeyedLock<>();
|
||||||
|
@ -286,7 +286,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
|
||||||
bindServerBootstrap(name, mergedSettings);
|
bindServerBootstrap(name, mergedSettings);
|
||||||
}
|
}
|
||||||
|
|
||||||
InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(DEFAULT_PROFILE).getLocalAddress();
|
InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(DEFAULT_PROFILE).get(0).getLocalAddress();
|
||||||
int publishPort = settings.getAsInt("transport.netty.publish_port", settings.getAsInt("transport.publish_port", boundAddress.getPort()));
|
int publishPort = settings.getAsInt("transport.netty.publish_port", settings.getAsInt("transport.publish_port", boundAddress.getPort()));
|
||||||
String publishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
|
String publishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
|
||||||
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
|
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
|
||||||
|
@ -397,23 +397,38 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
|
||||||
|
|
||||||
private void bindServerBootstrap(final String name, final Settings settings) {
|
private void bindServerBootstrap(final String name, final Settings settings) {
|
||||||
// Bind and start to accept incoming connections.
|
// Bind and start to accept incoming connections.
|
||||||
InetAddress hostAddressX;
|
InetAddress hostAddresses[];
|
||||||
String bindHost = settings.get("bind_host");
|
String bindHost = settings.get("bind_host");
|
||||||
try {
|
try {
|
||||||
hostAddressX = networkService.resolveBindHostAddress(bindHost);
|
hostAddresses = networkService.resolveBindHostAddress(bindHost);
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e);
|
throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e);
|
||||||
}
|
}
|
||||||
final InetAddress hostAddress = hostAddressX;
|
for (InetAddress hostAddress : hostAddresses) {
|
||||||
|
bindServerBootstrap(name, hostAddress, settings);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void bindServerBootstrap(final String name, final InetAddress hostAddress, Settings settings) {
|
||||||
|
|
||||||
String port = settings.get("port");
|
String port = settings.get("port");
|
||||||
PortsRange portsRange = new PortsRange(port);
|
PortsRange portsRange = new PortsRange(port);
|
||||||
final AtomicReference<Exception> lastException = new AtomicReference<>();
|
final AtomicReference<Exception> lastException = new AtomicReference<>();
|
||||||
|
final AtomicReference<SocketAddress> boundSocket = new AtomicReference<>();
|
||||||
boolean success = portsRange.iterate(new PortsRange.PortCallback() {
|
boolean success = portsRange.iterate(new PortsRange.PortCallback() {
|
||||||
@Override
|
@Override
|
||||||
public boolean onPortNumber(int portNumber) {
|
public boolean onPortNumber(int portNumber) {
|
||||||
try {
|
try {
|
||||||
serverChannels.put(name, serverBootstraps.get(name).bind(new InetSocketAddress(hostAddress, portNumber)));
|
Channel channel = serverBootstraps.get(name).bind(new InetSocketAddress(hostAddress, portNumber));
|
||||||
|
synchronized (serverChannels) {
|
||||||
|
List<Channel> list = serverChannels.get(name);
|
||||||
|
if (list == null) {
|
||||||
|
list = new ArrayList<>();
|
||||||
|
serverChannels.put(name, list);
|
||||||
|
}
|
||||||
|
list.add(channel);
|
||||||
|
boundSocket.set(channel.getLocalAddress());
|
||||||
|
}
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
lastException.set(e);
|
lastException.set(e);
|
||||||
return false;
|
return false;
|
||||||
|
@ -426,14 +441,15 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!DEFAULT_PROFILE.equals(name)) {
|
if (!DEFAULT_PROFILE.equals(name)) {
|
||||||
InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(name).getLocalAddress();
|
InetSocketAddress boundAddress = (InetSocketAddress) boundSocket.get();
|
||||||
int publishPort = settings.getAsInt("publish_port", boundAddress.getPort());
|
int publishPort = settings.getAsInt("publish_port", boundAddress.getPort());
|
||||||
String publishHost = settings.get("publish_host", boundAddress.getHostString());
|
String publishHost = settings.get("publish_host", boundAddress.getHostString());
|
||||||
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
|
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
|
||||||
profileBoundAddresses.put(name, new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress)));
|
// TODO: support real multihoming with publishing. Today we use putIfAbsent so only the prioritized address is published
|
||||||
|
profileBoundAddresses.putIfAbsent(name, new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress)));
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug("Bound profile [{}] to address [{}]", name, serverChannels.get(name).getLocalAddress());
|
logger.info("Bound profile [{}] to address [{}]", name, boundSocket.get());
|
||||||
}
|
}
|
||||||
|
|
||||||
private void createServerBootstrap(String name, Settings settings) {
|
private void createServerBootstrap(String name, Settings settings) {
|
||||||
|
@ -500,15 +516,17 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
|
||||||
nodeChannels.close();
|
nodeChannels.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
Iterator<Map.Entry<String, Channel>> serverChannelIterator = serverChannels.entrySet().iterator();
|
Iterator<Map.Entry<String, List<Channel>>> serverChannelIterator = serverChannels.entrySet().iterator();
|
||||||
while (serverChannelIterator.hasNext()) {
|
while (serverChannelIterator.hasNext()) {
|
||||||
Map.Entry<String, Channel> serverChannelEntry = serverChannelIterator.next();
|
Map.Entry<String, List<Channel>> serverChannelEntry = serverChannelIterator.next();
|
||||||
String name = serverChannelEntry.getKey();
|
String name = serverChannelEntry.getKey();
|
||||||
Channel serverChannel = serverChannelEntry.getValue();
|
List<Channel> serverChannels = serverChannelEntry.getValue();
|
||||||
try {
|
for (Channel serverChannel : serverChannels) {
|
||||||
serverChannel.close().awaitUninterruptibly();
|
try {
|
||||||
} catch (Throwable t) {
|
serverChannel.close().awaitUninterruptibly();
|
||||||
logger.debug("Error closing serverChannel for profile [{}]", t, name);
|
} catch (Throwable t) {
|
||||||
|
logger.debug("Error closing serverChannel for profile [{}]", t, name);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
serverChannelIterator.remove();
|
serverChannelIterator.remove();
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,7 +8,7 @@ SYNOPSIS
|
||||||
|
|
||||||
DESCRIPTION
|
DESCRIPTION
|
||||||
|
|
||||||
Start elasticsearch and manage plugins
|
Start an elasticsearch node
|
||||||
|
|
||||||
COMMANDS
|
COMMANDS
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ DESCRIPTION
|
||||||
|
|
||||||
EXAMPLES
|
EXAMPLES
|
||||||
|
|
||||||
plugin install elasticsearch-analysis-kuromoji
|
plugin install analysis-kuromoji
|
||||||
|
|
||||||
plugin install elasticsearch/shield/latest
|
plugin install elasticsearch/shield/latest
|
||||||
|
|
||||||
|
@ -32,23 +32,24 @@ OFFICIAL PLUGINS
|
||||||
|
|
||||||
The following plugins are officially supported and can be installed by just referring to their name
|
The following plugins are officially supported and can be installed by just referring to their name
|
||||||
|
|
||||||
- elasticsearch-analysis-icu
|
- analysis-icu
|
||||||
- elasticsearch-analysis-kuromoji
|
- analysis-kuromoji
|
||||||
- elasticsearch-analysis-phonetic
|
- analysis-phonetic
|
||||||
- elasticsearch-analysis-smartcn
|
- analysis-smartcn
|
||||||
- elasticsearch-analysis-stempel
|
- analysis-stempel
|
||||||
- elasticsearch-cloud-aws
|
- cloud-aws
|
||||||
- elasticsearch-cloud-azure
|
- cloud-azure
|
||||||
- elasticsearch-cloud-gce
|
- cloud-gce
|
||||||
- elasticsearch-delete-by-query
|
- delete-by-query
|
||||||
- elasticsearch-lang-javascript
|
- lang-javascript
|
||||||
- elasticsearch-lang-python
|
- lang-python
|
||||||
- elasticsearch-mapper-size
|
- mapper-murmur3
|
||||||
|
- mapper-size
|
||||||
|
|
||||||
|
|
||||||
OPTIONS
|
OPTIONS
|
||||||
|
|
||||||
-u,--url URL to retrive the plugin from
|
-u,--url URL to retrieve the plugin from
|
||||||
|
|
||||||
-t,--timeout Timeout until the plugin download is abort
|
-t,--timeout Timeout until the plugin download is abort
|
||||||
|
|
||||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.cluster.*;
|
||||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||||
import org.elasticsearch.cluster.routing.RoutingNode;
|
import org.elasticsearch.cluster.routing.RoutingNode;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.common.transport.DummyTransportAddress;
|
||||||
import org.elasticsearch.monitor.fs.FsInfo;
|
import org.elasticsearch.monitor.fs.FsInfo;
|
||||||
import org.elasticsearch.test.ESIntegTestCase;
|
import org.elasticsearch.test.ESIntegTestCase;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -167,7 +168,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
|
||||||
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
|
usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes());
|
||||||
paths[0] = path;
|
paths[0] = path;
|
||||||
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
|
FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), paths);
|
||||||
return new NodeStats(new DiscoveryNode(nodeName, null, Version.V_2_0_0_beta1),
|
return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, Version.CURRENT),
|
||||||
System.currentTimeMillis(),
|
System.currentTimeMillis(),
|
||||||
null, null, null, null, null,
|
null, null, null, null, null,
|
||||||
fsInfo,
|
fsInfo,
|
||||||
|
|
|
@ -24,12 +24,15 @@ import org.elasticsearch.common.inject.spi.Elements;
|
||||||
import org.elasticsearch.common.inject.spi.InstanceBinding;
|
import org.elasticsearch.common.inject.spi.InstanceBinding;
|
||||||
import org.elasticsearch.common.inject.spi.LinkedKeyBinding;
|
import org.elasticsearch.common.inject.spi.LinkedKeyBinding;
|
||||||
import org.elasticsearch.common.inject.spi.ProviderInstanceBinding;
|
import org.elasticsearch.common.inject.spi.ProviderInstanceBinding;
|
||||||
|
import org.elasticsearch.common.inject.spi.ProviderLookup;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
import java.lang.annotation.Annotation;
|
import java.lang.annotation.Annotation;
|
||||||
import java.lang.reflect.Type;
|
import java.lang.reflect.Type;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -77,7 +80,7 @@ public abstract class ModuleTestCase extends ESTestCase {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Configures the module and checks a Map<String, Class> of the "to" class
|
* Configures the module and checks a Map<String, Class> of the "to" class
|
||||||
* is bound to "theClas".
|
* is bound to "theClass".
|
||||||
*/
|
*/
|
||||||
public void assertMapMultiBinding(Module module, Class to, Class theClass) {
|
public void assertMapMultiBinding(Module module, Class to, Class theClass) {
|
||||||
List<Element> elements = Elements.getElements(module);
|
List<Element> elements = Elements.getElements(module);
|
||||||
|
@ -138,10 +141,18 @@ public abstract class ModuleTestCase extends ESTestCase {
|
||||||
assertTrue("Did not find provider for set of " + to.getName(), providerFound);
|
assertTrue("Did not find provider for set of " + to.getName(), providerFound);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures the module, and ensures an instance is bound to the "to" class, and the
|
||||||
|
* provided tester returns true on the instance.
|
||||||
|
*/
|
||||||
public <T> void assertInstanceBinding(Module module, Class<T> to, Predicate<T> tester) {
|
public <T> void assertInstanceBinding(Module module, Class<T> to, Predicate<T> tester) {
|
||||||
assertInstanceBindingWithAnnotation(module, to, tester, null);
|
assertInstanceBindingWithAnnotation(module, to, tester, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Like {@link #assertInstanceBinding(Module, Class, Predicate)}, but filters the
|
||||||
|
* classes checked by the given annotation.
|
||||||
|
*/
|
||||||
public <T> void assertInstanceBindingWithAnnotation(Module module, Class<T> to, Predicate<T> tester, Class<? extends Annotation> annotation) {
|
public <T> void assertInstanceBindingWithAnnotation(Module module, Class<T> to, Predicate<T> tester, Class<? extends Annotation> annotation) {
|
||||||
List<Element> elements = Elements.getElements(module);
|
List<Element> elements = Elements.getElements(module);
|
||||||
for (Element element : elements) {
|
for (Element element : elements) {
|
||||||
|
@ -161,4 +172,39 @@ public abstract class ModuleTestCase extends ESTestCase {
|
||||||
}
|
}
|
||||||
fail("Did not find any instance binding to " + to.getName() + ". Found these bindings:\n" + s);
|
fail("Did not find any instance binding to " + to.getName() + ". Found these bindings:\n" + s);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configures the module, and ensures a map exists between the "keyType" and "valueType",
|
||||||
|
* and that all of the "expected" values are bound.
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
public <K,V> void assertMapInstanceBinding(Module module, Class<K> keyType, Class<V> valueType, Map<K,V> expected) throws Exception {
|
||||||
|
// this method is insane because java type erasure makes it incredibly difficult...
|
||||||
|
Map<K,Key> keys = new HashMap<>();
|
||||||
|
Map<Key,V> values = new HashMap<>();
|
||||||
|
List<Element> elements = Elements.getElements(module);
|
||||||
|
for (Element element : elements) {
|
||||||
|
if (element instanceof InstanceBinding) {
|
||||||
|
InstanceBinding binding = (InstanceBinding) element;
|
||||||
|
if (binding.getKey().getRawType().equals(valueType)) {
|
||||||
|
values.put(binding.getKey(), (V)binding.getInstance());
|
||||||
|
} else if (binding.getInstance() instanceof Map.Entry) {
|
||||||
|
Map.Entry entry = (Map.Entry)binding.getInstance();
|
||||||
|
Object key = entry.getKey();
|
||||||
|
Object providerValue = entry.getValue();
|
||||||
|
if (key.getClass().equals(keyType) && providerValue instanceof ProviderLookup.ProviderImpl) {
|
||||||
|
ProviderLookup.ProviderImpl provider = (ProviderLookup.ProviderImpl)providerValue;
|
||||||
|
keys.put((K)key, provider.getKey());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (Map.Entry<K, V> entry : expected.entrySet()) {
|
||||||
|
Key valueKey = keys.get(entry.getKey());
|
||||||
|
assertNotNull("Could not find binding for key [" + entry.getKey() + "], found these keys:\n" + keys.keySet(), valueKey);
|
||||||
|
V value = values.get(valueKey);
|
||||||
|
assertNotNull("Could not find value for instance key [" + valueKey + "], found these bindings:\n" + elements);
|
||||||
|
assertEquals(entry.getValue(), value);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,77 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.common.network;
|
||||||
|
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
import java.net.InetAddress;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tests for network utils. Please avoid using any methods that cause DNS lookups!
|
||||||
|
*/
|
||||||
|
public class NetworkUtilsTests extends ESTestCase {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* test sort key order respects PREFER_IPV4
|
||||||
|
*/
|
||||||
|
public void testSortKey() throws Exception {
|
||||||
|
InetAddress localhostv4 = InetAddress.getByName("127.0.0.1");
|
||||||
|
InetAddress localhostv6 = InetAddress.getByName("::1");
|
||||||
|
assertTrue(NetworkUtils.sortKey(localhostv4, false) < NetworkUtils.sortKey(localhostv6, false));
|
||||||
|
assertTrue(NetworkUtils.sortKey(localhostv6, true) < NetworkUtils.sortKey(localhostv4, true));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* test ordinary addresses sort before private addresses
|
||||||
|
*/
|
||||||
|
public void testSortKeySiteLocal() throws Exception {
|
||||||
|
InetAddress siteLocal = InetAddress.getByName("172.16.0.1");
|
||||||
|
assert siteLocal.isSiteLocalAddress();
|
||||||
|
InetAddress ordinary = InetAddress.getByName("192.192.192.192");
|
||||||
|
assertTrue(NetworkUtils.sortKey(ordinary, true) < NetworkUtils.sortKey(siteLocal, true));
|
||||||
|
assertTrue(NetworkUtils.sortKey(ordinary, false) < NetworkUtils.sortKey(siteLocal, false));
|
||||||
|
|
||||||
|
InetAddress siteLocal6 = InetAddress.getByName("fec0::1");
|
||||||
|
assert siteLocal6.isSiteLocalAddress();
|
||||||
|
InetAddress ordinary6 = InetAddress.getByName("fddd::1");
|
||||||
|
assertTrue(NetworkUtils.sortKey(ordinary6, true) < NetworkUtils.sortKey(siteLocal6, true));
|
||||||
|
assertTrue(NetworkUtils.sortKey(ordinary6, false) < NetworkUtils.sortKey(siteLocal6, false));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* test private addresses sort before link local addresses
|
||||||
|
*/
|
||||||
|
public void testSortKeyLinkLocal() throws Exception {
|
||||||
|
InetAddress linkLocal = InetAddress.getByName("fe80::1");
|
||||||
|
assert linkLocal.isLinkLocalAddress();
|
||||||
|
InetAddress ordinary = InetAddress.getByName("fddd::1");
|
||||||
|
assertTrue(NetworkUtils.sortKey(ordinary, true) < NetworkUtils.sortKey(linkLocal, true));
|
||||||
|
assertTrue(NetworkUtils.sortKey(ordinary, false) < NetworkUtils.sortKey(linkLocal, false));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test filtering out ipv4/ipv6 addresses
|
||||||
|
*/
|
||||||
|
public void testFilter() throws Exception {
|
||||||
|
InetAddress addresses[] = { InetAddress.getByName("::1"), InetAddress.getByName("127.0.0.1") };
|
||||||
|
assertArrayEquals(new InetAddress[] { InetAddress.getByName("127.0.0.1") }, NetworkUtils.filterIPV4(addresses));
|
||||||
|
assertArrayEquals(new InetAddress[] { InetAddress.getByName("::1") }, NetworkUtils.filterIPV6(addresses));
|
||||||
|
}
|
||||||
|
}
|
|
@ -21,13 +21,14 @@ package org.elasticsearch.common.util.concurrent;
|
||||||
|
|
||||||
import org.elasticsearch.ExceptionsHelper;
|
import org.elasticsearch.ExceptionsHelper;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.junit.Test;
|
import org.hamcrest.Matcher;
|
||||||
|
|
||||||
import java.util.concurrent.CountDownLatch;
|
import java.util.concurrent.CountDownLatch;
|
||||||
import java.util.concurrent.ThreadPoolExecutor;
|
import java.util.concurrent.ThreadPoolExecutor;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
import java.util.concurrent.atomic.AtomicBoolean;
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
|
||||||
|
import static org.hamcrest.Matchers.anyOf;
|
||||||
import static org.hamcrest.Matchers.containsString;
|
import static org.hamcrest.Matchers.containsString;
|
||||||
import static org.hamcrest.Matchers.equalTo;
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
import static org.hamcrest.Matchers.lessThan;
|
import static org.hamcrest.Matchers.lessThan;
|
||||||
|
@ -275,7 +276,19 @@ public class EsExecutorsTests extends ESTestCase {
|
||||||
assertThat(message, containsString("on EsThreadPoolExecutor[testRejectionMessage"));
|
assertThat(message, containsString("on EsThreadPoolExecutor[testRejectionMessage"));
|
||||||
assertThat(message, containsString("queue capacity = " + queue));
|
assertThat(message, containsString("queue capacity = " + queue));
|
||||||
assertThat(message, containsString("[Running"));
|
assertThat(message, containsString("[Running"));
|
||||||
assertThat(message, containsString("active threads = " + pool));
|
/*
|
||||||
|
* While you'd expect all threads in the pool to be active when the queue gets long enough to cause rejections this isn't
|
||||||
|
* always the case. Sometimes you'll see "active threads = <pool - 1>", presumably because one of those threads has finished
|
||||||
|
* its current task but has yet to pick up another task. You too can reproduce this by adding the @Repeat annotation to this
|
||||||
|
* test with something like 10000 iterations. I suspect you could see "active threads = <any natural number <= to pool>". So
|
||||||
|
* that is what we assert.
|
||||||
|
*/
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
Matcher<String>[] activeThreads = new Matcher[pool + 1];
|
||||||
|
for (int p = 0; p <= pool; p++) {
|
||||||
|
activeThreads[p] = containsString("active threads = " + p);
|
||||||
|
}
|
||||||
|
assertThat(message, anyOf(activeThreads));
|
||||||
assertThat(message, containsString("queued tasks = " + queue));
|
assertThat(message, containsString("queued tasks = " + queue));
|
||||||
assertThat(message, containsString("completed tasks = 0"));
|
assertThat(message, containsString("completed tasks = 0"));
|
||||||
}
|
}
|
||||||
|
|
|
@ -1116,7 +1116,7 @@ public class GetActionIT extends ESIntegTestCase {
|
||||||
@Test
|
@Test
|
||||||
public void testGeneratedNumberFieldsUnstored() throws IOException {
|
public void testGeneratedNumberFieldsUnstored() throws IOException {
|
||||||
indexSingleDocumentWithNumericFieldsGeneratedFromText(false, randomBoolean());
|
indexSingleDocumentWithNumericFieldsGeneratedFromText(false, randomBoolean());
|
||||||
String[] fieldsList = {"token_count", "text.token_count", "murmur", "text.murmur"};
|
String[] fieldsList = {"token_count", "text.token_count"};
|
||||||
// before refresh - document is only in translog
|
// before refresh - document is only in translog
|
||||||
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
|
assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
|
||||||
refresh();
|
refresh();
|
||||||
|
@ -1130,7 +1130,7 @@ public class GetActionIT extends ESIntegTestCase {
|
||||||
@Test
|
@Test
|
||||||
public void testGeneratedNumberFieldsStored() throws IOException {
|
public void testGeneratedNumberFieldsStored() throws IOException {
|
||||||
indexSingleDocumentWithNumericFieldsGeneratedFromText(true, randomBoolean());
|
indexSingleDocumentWithNumericFieldsGeneratedFromText(true, randomBoolean());
|
||||||
String[] fieldsList = {"token_count", "text.token_count", "murmur", "text.murmur"};
|
String[] fieldsList = {"token_count", "text.token_count"};
|
||||||
// before refresh - document is only in translog
|
// before refresh - document is only in translog
|
||||||
assertGetFieldsNull(indexOrAlias(), "doc", "1", fieldsList);
|
assertGetFieldsNull(indexOrAlias(), "doc", "1", fieldsList);
|
||||||
assertGetFieldsException(indexOrAlias(), "doc", "1", fieldsList);
|
assertGetFieldsException(indexOrAlias(), "doc", "1", fieldsList);
|
||||||
|
@ -1159,10 +1159,6 @@ public class GetActionIT extends ESIntegTestCase {
|
||||||
" \"analyzer\": \"standard\",\n" +
|
" \"analyzer\": \"standard\",\n" +
|
||||||
" \"store\": \"" + storedString + "\"" +
|
" \"store\": \"" + storedString + "\"" +
|
||||||
" },\n" +
|
" },\n" +
|
||||||
" \"murmur\": {\n" +
|
|
||||||
" \"type\": \"murmur3\",\n" +
|
|
||||||
" \"store\": \"" + storedString + "\"" +
|
|
||||||
" },\n" +
|
|
||||||
" \"text\": {\n" +
|
" \"text\": {\n" +
|
||||||
" \"type\": \"string\",\n" +
|
" \"type\": \"string\",\n" +
|
||||||
" \"fields\": {\n" +
|
" \"fields\": {\n" +
|
||||||
|
@ -1170,10 +1166,6 @@ public class GetActionIT extends ESIntegTestCase {
|
||||||
" \"type\": \"token_count\",\n" +
|
" \"type\": \"token_count\",\n" +
|
||||||
" \"analyzer\": \"standard\",\n" +
|
" \"analyzer\": \"standard\",\n" +
|
||||||
" \"store\": \"" + storedString + "\"" +
|
" \"store\": \"" + storedString + "\"" +
|
||||||
" },\n" +
|
|
||||||
" \"murmur\": {\n" +
|
|
||||||
" \"type\": \"murmur3\",\n" +
|
|
||||||
" \"store\": \"" + storedString + "\"" +
|
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }" +
|
" }" +
|
||||||
|
@ -1185,7 +1177,6 @@ public class GetActionIT extends ESIntegTestCase {
|
||||||
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
|
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
|
||||||
ensureGreen();
|
ensureGreen();
|
||||||
String doc = "{\n" +
|
String doc = "{\n" +
|
||||||
" \"murmur\": \"Some value that can be hashed\",\n" +
|
|
||||||
" \"token_count\": \"A text with five words.\",\n" +
|
" \"token_count\": \"A text with five words.\",\n" +
|
||||||
" \"text\": \"A text with five words.\"\n" +
|
" \"text\": \"A text with five words.\"\n" +
|
||||||
"}\n";
|
"}\n";
|
||||||
|
|
|
@ -40,7 +40,6 @@ import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexNameModule;
|
import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
|
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.hamcrest.MatcherAssert;
|
import org.hamcrest.MatcherAssert;
|
||||||
|
@ -55,7 +54,9 @@ import java.nio.file.Path;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||||
import static org.hamcrest.Matchers.*;
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.instanceOf;
|
||||||
|
import static org.hamcrest.Matchers.is;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
|
@ -66,7 +67,7 @@ public class AnalysisModuleTests extends ESTestCase {
|
||||||
|
|
||||||
public AnalysisService getAnalysisService(Settings settings) {
|
public AnalysisService getAnalysisService(Settings settings) {
|
||||||
Index index = new Index("test");
|
Index index = new Index("test");
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
AnalysisModule analysisModule = new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class));
|
AnalysisModule analysisModule = new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class));
|
||||||
analysisModule.addTokenFilter("myfilter", MyFilterTokenFilterFactory.class);
|
analysisModule.addTokenFilter("myfilter", MyFilterTokenFilterFactory.class);
|
||||||
injector = new ModulesBuilder().add(
|
injector = new ModulesBuilder().add(
|
||||||
|
|
|
@ -30,7 +30,7 @@ import org.elasticsearch.env.EnvironmentModule;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexNameModule;
|
import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
import org.elasticsearch.indices.IndicesModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
|
|
||||||
import java.nio.file.Path;
|
import java.nio.file.Path;
|
||||||
|
@ -52,8 +52,15 @@ public class AnalysisTestsHelper {
|
||||||
if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) {
|
if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) {
|
||||||
settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
settings = Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
|
||||||
}
|
}
|
||||||
|
IndicesModule indicesModule = new IndicesModule(settings) {
|
||||||
|
@Override
|
||||||
|
public void configure() {
|
||||||
|
// skip services
|
||||||
|
bindHunspellExtension();
|
||||||
|
}
|
||||||
|
};
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings),
|
||||||
new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
new EnvironmentModule(new Environment(settings)), indicesModule).createInjector();
|
||||||
|
|
||||||
AnalysisModule analysisModule = new AnalysisModule(settings,
|
AnalysisModule analysisModule = new AnalysisModule(settings,
|
||||||
parentInjector.getInstance(IndicesAnalysisService.class));
|
parentInjector.getInstance(IndicesAnalysisService.class));
|
||||||
|
|
|
@ -29,7 +29,6 @@ import org.elasticsearch.env.EnvironmentModule;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexNameModule;
|
import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
import org.elasticsearch.test.ESTokenStreamTestCase;
|
import org.elasticsearch.test.ESTokenStreamTestCase;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -51,7 +50,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
|
||||||
.putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping")
|
.putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping")
|
||||||
.put("path.home", createTempDir().toString())
|
.put("path.home", createTempDir().toString())
|
||||||
.build();
|
.build();
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
new IndexSettingsModule(index, settings),
|
new IndexSettingsModule(index, settings),
|
||||||
new IndexNameModule(index),
|
new IndexNameModule(index),
|
||||||
|
@ -77,7 +76,7 @@ public class CharFilterTests extends ESTokenStreamTestCase {
|
||||||
.putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip")
|
.putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip")
|
||||||
.put("path.home", createTempDir().toString())
|
.put("path.home", createTempDir().toString())
|
||||||
.build();
|
.build();
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
new IndexSettingsModule(index, settings),
|
new IndexSettingsModule(index, settings),
|
||||||
new IndexNameModule(index),
|
new IndexNameModule(index),
|
||||||
|
|
|
@ -37,7 +37,6 @@ import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
|
import org.elasticsearch.index.analysis.compound.DictionaryCompoundWordTokenFilterFactory;
|
||||||
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
|
import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.hamcrest.MatcherAssert;
|
import org.hamcrest.MatcherAssert;
|
||||||
|
@ -48,7 +47,9 @@ import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||||
import static org.hamcrest.Matchers.*;
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.hasItems;
|
||||||
|
import static org.hamcrest.Matchers.instanceOf;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*/
|
*/
|
||||||
|
@ -58,7 +59,7 @@ public class CompoundAnalysisTests extends ESTestCase {
|
||||||
public void testDefaultsCompoundAnalysis() throws Exception {
|
public void testDefaultsCompoundAnalysis() throws Exception {
|
||||||
Index index = new Index("test");
|
Index index = new Index("test");
|
||||||
Settings settings = getJsonSettings();
|
Settings settings = getJsonSettings();
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
AnalysisModule analysisModule = new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class));
|
AnalysisModule analysisModule = new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class));
|
||||||
analysisModule.addTokenFilter("myfilter", MyFilterTokenFilterFactory.class);
|
analysisModule.addTokenFilter("myfilter", MyFilterTokenFilterFactory.class);
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
|
@ -85,7 +86,7 @@ public class CompoundAnalysisTests extends ESTestCase {
|
||||||
|
|
||||||
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
|
private List<String> analyze(Settings settings, String analyzerName, String text) throws IOException {
|
||||||
Index index = new Index("test");
|
Index index = new Index("test");
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
AnalysisModule analysisModule = new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class));
|
AnalysisModule analysisModule = new AnalysisModule(settings, parentInjector.getInstance(IndicesAnalysisService.class));
|
||||||
analysisModule.addTokenFilter("myfilter", MyFilterTokenFilterFactory.class);
|
analysisModule.addTokenFilter("myfilter", MyFilterTokenFilterFactory.class);
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
|
|
|
@ -30,7 +30,6 @@ import org.elasticsearch.env.EnvironmentModule;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexNameModule;
|
import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
import org.elasticsearch.test.ESTokenStreamTestCase;
|
import org.elasticsearch.test.ESTokenStreamTestCase;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -48,7 +47,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase {
|
||||||
.loadFromStream(json, getClass().getResourceAsStream(json))
|
.loadFromStream(json, getClass().getResourceAsStream(json))
|
||||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||||
.build();
|
.build();
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
new IndexSettingsModule(index, settings),
|
new IndexSettingsModule(index, settings),
|
||||||
new IndexNameModule(index),
|
new IndexNameModule(index),
|
||||||
|
|
|
@ -30,7 +30,6 @@ import org.elasticsearch.env.EnvironmentModule;
|
||||||
import org.elasticsearch.index.Index;
|
import org.elasticsearch.index.Index;
|
||||||
import org.elasticsearch.index.IndexNameModule;
|
import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
import org.elasticsearch.test.ESTokenStreamTestCase;
|
import org.elasticsearch.test.ESTokenStreamTestCase;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -48,7 +47,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase {
|
||||||
.put("path.home", createTempDir().toString())
|
.put("path.home", createTempDir().toString())
|
||||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||||
.build();
|
.build();
|
||||||
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings)), new IndicesAnalysisModule()).createInjector();
|
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings), new EnvironmentModule(new Environment(settings))).createInjector();
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
new IndexSettingsModule(index, settings),
|
new IndexSettingsModule(index, settings),
|
||||||
new IndexNameModule(index),
|
new IndexNameModule(index),
|
||||||
|
|
|
@ -39,7 +39,6 @@ import org.elasticsearch.index.IndexNameModule;
|
||||||
import org.elasticsearch.index.analysis.AnalysisModule;
|
import org.elasticsearch.index.analysis.AnalysisModule;
|
||||||
import org.elasticsearch.index.analysis.AnalysisService;
|
import org.elasticsearch.index.analysis.AnalysisService;
|
||||||
import org.elasticsearch.index.settings.IndexSettingsModule;
|
import org.elasticsearch.index.settings.IndexSettingsModule;
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
|
|
||||||
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
import org.hamcrest.MatcherAssert;
|
import org.hamcrest.MatcherAssert;
|
||||||
|
@ -80,8 +79,7 @@ public class SynonymsAnalysisTest extends ESTestCase {
|
||||||
|
|
||||||
Injector parentInjector = new ModulesBuilder().add(
|
Injector parentInjector = new ModulesBuilder().add(
|
||||||
new SettingsModule(settings),
|
new SettingsModule(settings),
|
||||||
new EnvironmentModule(new Environment(settings)),
|
new EnvironmentModule(new Environment(settings)))
|
||||||
new IndicesAnalysisModule())
|
|
||||||
.createInjector();
|
.createInjector();
|
||||||
Injector injector = new ModulesBuilder().add(
|
Injector injector = new ModulesBuilder().add(
|
||||||
new IndexSettingsModule(index, settings),
|
new IndexSettingsModule(index, settings),
|
||||||
|
|
|
@ -0,0 +1,89 @@
|
||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.index.cache;
|
||||||
|
|
||||||
|
import org.apache.lucene.search.QueryCachingPolicy;
|
||||||
|
import org.apache.lucene.search.Weight;
|
||||||
|
import org.elasticsearch.common.inject.ModuleTestCase;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
|
import org.elasticsearch.index.Index;
|
||||||
|
import org.elasticsearch.index.cache.query.QueryCache;
|
||||||
|
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
|
||||||
|
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
public class IndexCacheModuleTests extends ModuleTestCase {
|
||||||
|
|
||||||
|
public void testCannotRegisterProvidedImplementations() {
|
||||||
|
IndexCacheModule module = new IndexCacheModule(Settings.EMPTY);
|
||||||
|
try {
|
||||||
|
module.registerQueryCache("index", IndexQueryCache.class);
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [index]");
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
module.registerQueryCache("none", NoneQueryCache.class);
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [none]");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testRegisterCustomQueryCache() {
|
||||||
|
IndexCacheModule module = new IndexCacheModule(
|
||||||
|
Settings.builder().put(IndexCacheModule.QUERY_CACHE_TYPE, "custom").build()
|
||||||
|
);
|
||||||
|
module.registerQueryCache("custom", CustomQueryCache.class);
|
||||||
|
try {
|
||||||
|
module.registerQueryCache("custom", CustomQueryCache.class);
|
||||||
|
} catch (IllegalArgumentException e) {
|
||||||
|
assertEquals(e.getMessage(), "Can't register the same [query_cache] more than once for [custom]");
|
||||||
|
}
|
||||||
|
assertBinding(module, QueryCache.class, CustomQueryCache.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testDefaultQueryCacheImplIsSelected() {
|
||||||
|
IndexCacheModule module = new IndexCacheModule(Settings.EMPTY);
|
||||||
|
assertBinding(module, QueryCache.class, IndexQueryCache.class);
|
||||||
|
}
|
||||||
|
|
||||||
|
class CustomQueryCache implements QueryCache {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void clear(String reason) {
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void close() throws IOException {
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Index index() {
|
||||||
|
return new Index("test");
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Weight doCache(Weight weight, QueryCachingPolicy policy) {
|
||||||
|
return weight;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -535,7 +535,7 @@ public class InternalEngineTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
|
public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException {
|
||||||
counter.incrementAndGet();
|
counter.incrementAndGet();
|
||||||
return searcher;
|
return searcher;
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,7 +18,10 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper.geo;
|
package org.elasticsearch.index.mapper.geo;
|
||||||
|
|
||||||
|
import org.elasticsearch.Version;
|
||||||
|
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||||
|
@ -26,6 +29,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
|
||||||
import org.elasticsearch.index.mapper.MergeResult;
|
import org.elasticsearch.index.mapper.MergeResult;
|
||||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||||
|
import org.elasticsearch.test.VersionUtils;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
@ -138,7 +142,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
|
||||||
public void testNormalizeLatLonValuesDefault() throws Exception {
|
public void testNormalizeLatLonValuesDefault() throws Exception {
|
||||||
// default to normalize
|
// default to normalize
|
||||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||||
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
|
.startObject("properties").startObject("point").field("type", "geo_point").field("coerce", true)
|
||||||
|
.field("ignore_malformed", true).endObject().endObject()
|
||||||
.endObject().endObject().string();
|
.endObject().endObject().string();
|
||||||
|
|
||||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
|
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
|
||||||
|
@ -171,7 +176,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
|
||||||
@Test
|
@Test
|
||||||
public void testValidateLatLonValues() throws Exception {
|
public void testValidateLatLonValues() throws Exception {
|
||||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", true).endObject().endObject()
|
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
|
||||||
|
.field("ignore_malformed", false).endObject().endObject()
|
||||||
.endObject().endObject().string();
|
.endObject().endObject().string();
|
||||||
|
|
||||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
|
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
|
||||||
|
@ -231,7 +237,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
|
||||||
@Test
|
@Test
|
||||||
public void testNoValidateLatLonValues() throws Exception {
|
public void testNoValidateLatLonValues() throws Exception {
|
||||||
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||||
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", false).endObject().endObject()
|
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
|
||||||
|
.field("ignore_malformed", true).endObject().endObject()
|
||||||
.endObject().endObject().string();
|
.endObject().endObject().string();
|
||||||
|
|
||||||
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
|
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
|
||||||
|
@@ -472,30 +479,161 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
 assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
 }

+/**
+ * Test that expected exceptions are thrown when creating a new index with deprecated options
+ */
+@Test
+public void testOptionDeprecation() throws Exception {
+DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+// test deprecation exceptions on newly created indexes
+try {
+String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("validate", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(validateMapping);
+fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+} catch (MapperParsingException e) {
+assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
+}
+
+try {
+String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("validate_lat", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(validateMapping);
+fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+} catch (MapperParsingException e) {
+assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
+}
+
+try {
+String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("validate_lon", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(validateMapping);
+fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+} catch (MapperParsingException e) {
+assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
+}
+
+// test deprecated normalize
+try {
+String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("normalize", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(normalizeMapping);
+fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+} catch (MapperParsingException e) {
+assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
+}
+
+try {
+String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("normalize_lat", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(normalizeMapping);
+fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+} catch (MapperParsingException e) {
+assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
+}
+
+try {
+String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("normalize_lon", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(normalizeMapping);
+fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+} catch (MapperParsingException e) {
+assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
+}
+}
+
+/**
+ * Test backward compatibility
+ */
+@Test
+public void testBackwardCompatibleOptions() throws Exception {
+// backward compatibility testing
+Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
+Version.V_1_7_1)).build();
+
+// validate
+DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
+String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("validate", false).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(mapping);
+assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+
+mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("validate_lat", false).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(mapping);
+assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+
+mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("validate_lon", false).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(mapping);
+assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+
+// normalize
+mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("normalize", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(mapping);
+assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+
+mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("normalize_lat", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(mapping);
+assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+
+mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+.field("normalize_lon", true).endObject().endObject()
+.endObject().endObject().string();
+parser.parse(mapping);
+assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+}
+
 @Test
 public void testGeoPointMapperMerge() throws Exception {
 String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-.field("validate", true).endObject().endObject()
+.field("ignore_malformed", true).endObject().endObject()
 .endObject().endObject().string();
 DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
 DocumentMapper stage1 = parser.parse(stage1Mapping);
 String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-.field("validate", false).endObject().endObject()
+.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).field("geohash", true)
+.field("ignore_malformed", false).endObject().endObject()
 .endObject().endObject().string();
 DocumentMapper stage2 = parser.parse(stage2Mapping);

 MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
 assertThat(mergeResult.hasConflicts(), equalTo(true));
-assertThat(mergeResult.buildConflicts().length, equalTo(2));
+assertThat(mergeResult.buildConflicts().length, equalTo(1));
 // todo better way of checking conflict?
-assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
+assertThat("mapper [point] has different lat_lon", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));

 // correct mapping and ensure no failures
 stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-.field("validate", true).field("normalize", true).endObject().endObject()
+.field("ignore_malformed", true).endObject().endObject()
 .endObject().endObject().string();
 stage2 = parser.parse(stage2Mapping);
 mergeResult = stage1.merge(stage2.mapping(), false, false);
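For readers tracking the rename above: the backward-compatibility assertions boil down to a simple substitution of the deprecated geo_point options. A minimal sketch of that lookup (the helper name is made up for illustration, not code from this commit):

static String replacementForDeprecatedGeoPointOption(String option) {
    // validate/validate_lat/validate_lon used to mean "reject bad points";
    // the replacement ignore_malformed has the inverted sense (validate=false maps to ignore_malformed=true above).
    switch (option) {
        case "validate":
        case "validate_lat":
        case "validate_lon":
            return "ignore_malformed";
        // normalize/normalize_lat/normalize_lon map onto the new coerce option.
        case "normalize":
        case "normalize_lat":
        case "normalize_lon":
            return "coerce";
        default:
            return option;
    }
}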
@@ -31,7 +31,7 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {

 @Override
 protected int numProperties() {
-return 6 + super.numProperties();
+return 4 + super.numProperties();
 }

 @Override
@@ -40,11 +40,9 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
 switch (propNum) {
 case 0: gft.setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true); break;
 case 1: gft.setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); break;
-case 2: gft.setValidateLon(!gft.validateLon()); break;
-case 3: gft.setValidateLat(!gft.validateLat()); break;
-case 4: gft.setNormalizeLon(!gft.normalizeLon()); break;
-case 5: gft.setNormalizeLat(!gft.normalizeLat()); break;
-default: super.modifyProperty(ft, propNum - 6);
+case 2: gft.setIgnoreMalformed(!gft.ignoreMalformed()); break;
+case 3: gft.setCoerce(!gft.coerce()); break;
+default: super.modifyProperty(ft, propNum - 4);
 }
 }
 }
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionModule;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.inject.AbstractModule;
@@ -39,15 +40,13 @@ import org.elasticsearch.index.IndexNameModule;
 import org.elasticsearch.index.analysis.AnalysisModule;
 import org.elasticsearch.index.cache.IndexCacheModule;
 import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
-import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper;
 import org.elasticsearch.index.settings.IndexSettingsModule;
 import org.elasticsearch.index.similarity.SimilarityModule;
+import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.indices.analysis.IndicesAnalysisService;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.indices.query.IndicesQueriesModule;
 import org.elasticsearch.script.ScriptModule;
-import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.threadpool.ThreadPoolModule;
@@ -80,7 +79,13 @@ public class TemplateQueryParserTest extends ESTestCase {
 new EnvironmentModule(new Environment(settings)),
 new SettingsModule(settings),
 new ThreadPoolModule(new ThreadPool(settings)),
-new IndicesQueriesModule(),
+new IndicesModule(settings) {
+@Override
+public void configure() {
+// skip services
+bindQueryParsersExtension();
+}
+},
 new ScriptModule(settings),
 new IndexSettingsModule(index, settings),
 new IndexCacheModule(settings),
@@ -23,15 +23,13 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Weight;
-import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryParser;
 import org.elasticsearch.index.query.QueryParsingException;
-import org.elasticsearch.indices.query.IndicesQueriesModule;
+import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.plugins.AbstractPlugin;

 import java.io.IOException;
@@ -48,16 +46,8 @@ public class DummyQueryParserPlugin extends AbstractPlugin {
 return "dummy query";
 }

-@Override
-public void processModule(Module module) {
-if (module instanceof IndicesQueriesModule) {
-IndicesQueriesModule indicesQueriesModule = (IndicesQueriesModule) module;
-indicesQueriesModule.addQuery(DummyQueryParser.class);
-}
-}
-
-public Settings settings() {
-return Settings.EMPTY;
+public void onModule(IndicesModule module) {
+module.registerQueryParser(DummyQueryParser.class);
 }

 public static class DummyQueryBuilder extends QueryBuilder {
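The DummyQueryParserPlugin change above illustrates the new extension point: instead of inspecting arbitrary modules in processModule, a plugin now receives the IndicesModule directly and registers its parser there. A minimal sketch of a plugin using it (the plugin class and MyQueryParser are hypothetical; only onModule(IndicesModule) and registerQueryParser are taken from the diff):

public class MyQueryParserPlugin extends AbstractPlugin {
    @Override
    public String name() {
        return "my-query-parser-plugin";
    }

    @Override
    public String description() {
        return "registers a custom query parser";
    }

    // Called once with the IndicesModule, which now owns query parser registration.
    public void onModule(IndicesModule module) {
        module.registerQueryParser(MyQueryParser.class); // MyQueryParser implements QueryParser
    }
}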
@@ -339,34 +339,28 @@ public class GeoUtilsTests extends ESTestCase {
 @Test
 public void testNormalizePoint_outsideNormalRange_withOptions() {
 for (int i = 0; i < 100; i++) {
-boolean normLat = randomBoolean();
-boolean normLon = randomBoolean();
+boolean normalize = randomBoolean();
 double normalisedLat = (randomDouble() * 180.0) - 90.0;
 double normalisedLon = (randomDouble() * 360.0) - 180.0;
-int shiftLat = randomIntBetween(1, 10000);
-int shiftLon = randomIntBetween(1, 10000);
-double testLat = normalisedLat + (180.0 * shiftLat);
-double testLon = normalisedLon + (360.0 * shiftLon);
+int shift = randomIntBetween(1, 10000);
+double testLat = normalisedLat + (180.0 * shift);
+double testLon = normalisedLon + (360.0 * shift);

 double expectedLat;
 double expectedLon;
-if (normLat) {
-expectedLat = normalisedLat * (shiftLat % 2 == 0 ? 1 : -1);
-} else {
-expectedLat = testLat;
-}
-if (normLon) {
-expectedLon = normalisedLon + ((normLat && shiftLat % 2 == 1) ? 180 : 0);
+if (normalize) {
+expectedLat = normalisedLat * (shift % 2 == 0 ? 1 : -1);
+expectedLon = normalisedLon + ((shift % 2 == 1) ? 180 : 0);
 if (expectedLon > 180.0) {
 expectedLon -= 360;
 }
 } else {
-double shiftValue = normalisedLon > 0 ? -180 : 180;
-expectedLon = testLon + ((normLat && shiftLat % 2 == 1) ? shiftValue : 0);
+expectedLat = testLat;
+expectedLon = testLon;
 }
 GeoPoint testPoint = new GeoPoint(testLat, testLon);
 GeoPoint expectedPoint = new GeoPoint(expectedLat, expectedLon);
-GeoUtils.normalizePoint(testPoint, normLat, normLon);
+GeoUtils.normalizePoint(testPoint, normalize, normalize);
 assertThat("Unexpected Latitude", testPoint.lat(), closeTo(expectedPoint.lat(), MAX_ACCEPTABLE_ERROR));
 assertThat("Unexpected Longitude", testPoint.lon(), closeTo(expectedPoint.lon(), MAX_ACCEPTABLE_ERROR));
 }
 }
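The rewritten GeoUtilsTests loop above encodes the expected behaviour when both latitude and longitude normalization are enabled: shifting a latitude by an odd multiple of 180 degrees folds the point across a pole, which negates the latitude and swings the longitude by 180 degrees (wrapped back into (-180, 180]); even multiples land on the original point. A small sketch restating the expectation computed in the test (the helper is illustrative, not library code):

static double[] expectedNormalizedPoint(double normalisedLat, double normalisedLon, int shift) {
    // Odd 180-degree shifts in latitude flip the hemisphere and drag longitude by 180 degrees.
    double expectedLat = normalisedLat * (shift % 2 == 0 ? 1 : -1);
    double expectedLon = normalisedLon + ((shift % 2 == 1) ? 180 : 0);
    if (expectedLon > 180.0) {
        expectedLon -= 360; // wrap back into (-180, 180]
    }
    return new double[] { expectedLat, expectedLon };
}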
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.indices;
+
+import org.apache.lucene.analysis.hunspell.Dictionary;
+import org.apache.lucene.search.Query;
+import org.elasticsearch.common.inject.ModuleTestCase;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryParser;
+import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.index.query.TermQueryParser;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collections;
+
+public class IndicesModuleTests extends ModuleTestCase {
+
+static class FakeQueryParser implements QueryParser {
+@Override
+public String[] names() {
+return new String[] {"fake-query-parser"};
+}
+@Override
+public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+return null;
+}
+}
+
+public void testRegisterQueryParser() {
+IndicesModule module = new IndicesModule(Settings.EMPTY);
+module.registerQueryParser(FakeQueryParser.class);
+assertSetMultiBinding(module, QueryParser.class, FakeQueryParser.class);
+}
+
+public void testRegisterQueryParserDuplicate() {
+IndicesModule module = new IndicesModule(Settings.EMPTY);
+try {
+module.registerQueryParser(TermQueryParser.class);
+} catch (IllegalArgumentException e) {
+assertEquals(e.getMessage(), "Can't register the same [query_parser] more than once for [" + TermQueryParser.class.getName() + "]");
+}
+}
+
+public void testRegisterHunspellDictionary() throws Exception {
+IndicesModule module = new IndicesModule(Settings.EMPTY);
+InputStream aff = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.aff");
+InputStream dic = getClass().getResourceAsStream("/indices/analyze/conf_dir/hunspell/en_US/en_US.dic");
+Dictionary dictionary = new Dictionary(aff, dic);
+module.registerHunspellDictionary("foo", dictionary);
+assertMapInstanceBinding(module, String.class, Dictionary.class, Collections.singletonMap("foo", dictionary));
+}
+
+public void testRegisterHunspellDictionaryDuplicate() {
+IndicesModule module = new IndicesModule(Settings.EMPTY);
+try {
+module.registerQueryParser(TermQueryParser.class);
+} catch (IllegalArgumentException e) {
+assertEquals(e.getMessage(), "Can't register the same [query_parser] more than once for [" + TermQueryParser.class.getName() + "]");
+}
+}
+
+}
@@ -56,6 +56,13 @@ public class RecoveryStatusTests extends ESSingleNodeTestCase {
 assertSame(openIndexOutput, indexOutput);
 openIndexOutput.writeInt(1);
 }
+try {
+status.openAndPutIndexOutput("foo.bar", new StoreFileMetaData("foo.bar", 8), status.store());
+fail("file foo.bar is already opened and registered");
+} catch (IllegalStateException ex) {
+assertEquals("output for file [foo.bar] has already been created", ex.getMessage());
+// all well = it's already registered
+}
 status.removeOpenIndexOutputs("foo.bar");
 Set<String> strings = Sets.newHashSet(status.store().directory().listAll());
 String expectedFile = null;
@@ -344,7 +344,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
 .addAlias(new Alias("templated_alias-{index}"))
 .addAlias(new Alias("filtered_alias").filter("{\"type\":{\"value\":\"type2\"}}"))
 .addAlias(new Alias("complex_filtered_alias")
-.filter(QueryBuilders.termsQuery("_type", "typeX", "typeY", "typeZ").execution("bool")))
+.filter(QueryBuilders.termsQuery("_type", "typeX", "typeY", "typeZ")))
 .get();

 assertAcked(prepareCreate("test_index").addMapping("type1").addMapping("type2").addMapping("typeX").addMapping("typeY").addMapping("typeZ"));
@@ -45,18 +45,34 @@ public class OsProbeTests extends ESTestCase {
 OsStats stats = probe.osStats();
 assertNotNull(stats);
 assertThat(stats.getTimestamp(), greaterThan(0L));
-assertThat(stats.getLoadAverage(), anyOf(equalTo((double) -1), greaterThanOrEqualTo((double) 0)));
+if (Constants.WINDOWS) {
+// Load average is always -1 on Windows platforms
+assertThat(stats.getLoadAverage(), equalTo((double) -1));
+} else {
+// Load average can be negative if not available or not computed yet, otherwise it should be >= 0
+assertThat(stats.getLoadAverage(), anyOf(lessThan((double) 0), greaterThanOrEqualTo((double) 0)));
+}
+
 assertNotNull(stats.getMem());
-assertThat(stats.getMem().getTotal().bytes(), anyOf(equalTo(-1L), greaterThan(0L)));
-assertThat(stats.getMem().getFree().bytes(), anyOf(equalTo(-1L), greaterThan(0L)));
+assertThat(stats.getMem().getTotal().bytes(), greaterThan(0L));
+assertThat(stats.getMem().getFree().bytes(), greaterThan(0L));
 assertThat(stats.getMem().getFreePercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100)));
-assertThat(stats.getMem().getUsed().bytes(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L)));
+assertThat(stats.getMem().getUsed().bytes(), greaterThan(0L));
 assertThat(stats.getMem().getUsedPercent(), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100)));

 assertNotNull(stats.getSwap());
-assertThat(stats.getSwap().getTotal().bytes(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L)));
-assertThat(stats.getSwap().getFree().bytes(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L)));
-assertThat(stats.getSwap().getUsed().bytes(), anyOf(equalTo(-1L), greaterThanOrEqualTo(0L)));
+assertNotNull(stats.getSwap().getTotal());
+
+long total = stats.getSwap().getTotal().bytes();
+if (total > 0) {
+assertThat(stats.getSwap().getTotal().bytes(), greaterThan(0L));
+assertThat(stats.getSwap().getFree().bytes(), greaterThan(0L));
+assertThat(stats.getSwap().getUsed().bytes(), greaterThanOrEqualTo(0L));
+} else {
+// On platforms with no swap
+assertThat(stats.getSwap().getTotal().bytes(), equalTo(0L));
+assertThat(stats.getSwap().getFree().bytes(), equalTo(0L));
+assertThat(stats.getSwap().getUsed().bytes(), equalTo(0L));
+}
 }
 }
@@ -19,6 +19,7 @@

 package org.elasticsearch.monitor.process;

+import org.apache.lucene.util.Constants;
 import org.elasticsearch.bootstrap.Bootstrap;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;
@@ -43,14 +44,29 @@ public class ProcessProbeTests extends ESTestCase {
 public void testProcessStats() {
 ProcessStats stats = probe.processStats();
 assertNotNull(stats);
+assertThat(stats.getTimestamp(), greaterThan(0L));
+
+if (Constants.WINDOWS) {
+// Open/Max files descriptors are not supported on Windows platforms
+assertThat(stats.getOpenFileDescriptors(), equalTo(-1L));
+assertThat(stats.getMaxFileDescriptors(), equalTo(-1L));
+} else {
+assertThat(stats.getOpenFileDescriptors(), greaterThan(0L));
+assertThat(stats.getMaxFileDescriptors(), greaterThan(0L));
+}
+
 ProcessStats.Cpu cpu = stats.getCpu();
 assertNotNull(cpu);
-assertThat(cpu.getPercent(), greaterThanOrEqualTo((short) 0));
-assertThat(cpu.total, anyOf(equalTo(-1L), greaterThan(0L)));
+// CPU percent can be negative if the system recent cpu usage is not available
+assertThat(cpu.getPercent(), anyOf(lessThan((short) 0), allOf(greaterThanOrEqualTo((short) 0), lessThanOrEqualTo((short) 100))));
+
+// CPU time can return -1 if the the platform does not support this operation, let's see which platforms fail
+assertThat(cpu.total, greaterThan(0L));
+
 ProcessStats.Mem mem = stats.getMem();
 assertNotNull(mem);
-assertThat(mem.totalVirtual, anyOf(equalTo(-1L), greaterThan(0L)));
+// Commited total virtual memory can return -1 if not supported, let's see which platforms fail
+assertThat(mem.totalVirtual, greaterThan(0L));
 }
 }
@@ -52,9 +52,7 @@ import javax.net.ssl.HttpsURLConnection;
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.SSLSocketFactory;
 import java.io.BufferedWriter;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.FileVisitResult;
@@ -539,17 +537,19 @@ public class PluginManagerIT extends ESIntegTestCase {

 @Test
 public void testOfficialPluginName_ThrowsException() throws IOException {
-PluginManager.checkForOfficialPlugins("elasticsearch-analysis-icu");
-PluginManager.checkForOfficialPlugins("elasticsearch-analysis-kuromoji");
-PluginManager.checkForOfficialPlugins("elasticsearch-analysis-phonetic");
-PluginManager.checkForOfficialPlugins("elasticsearch-analysis-smartcn");
-PluginManager.checkForOfficialPlugins("elasticsearch-analysis-stempel");
-PluginManager.checkForOfficialPlugins("elasticsearch-cloud-aws");
-PluginManager.checkForOfficialPlugins("elasticsearch-cloud-azure");
-PluginManager.checkForOfficialPlugins("elasticsearch-cloud-gce");
-PluginManager.checkForOfficialPlugins("elasticsearch-delete-by-query");
-PluginManager.checkForOfficialPlugins("elasticsearch-lang-javascript");
-PluginManager.checkForOfficialPlugins("elasticsearch-lang-python");
+PluginManager.checkForOfficialPlugins("analysis-icu");
+PluginManager.checkForOfficialPlugins("analysis-kuromoji");
+PluginManager.checkForOfficialPlugins("analysis-phonetic");
+PluginManager.checkForOfficialPlugins("analysis-smartcn");
+PluginManager.checkForOfficialPlugins("analysis-stempel");
+PluginManager.checkForOfficialPlugins("cloud-aws");
+PluginManager.checkForOfficialPlugins("cloud-azure");
+PluginManager.checkForOfficialPlugins("cloud-gce");
+PluginManager.checkForOfficialPlugins("delete-by-query");
+PluginManager.checkForOfficialPlugins("lang-javascript");
+PluginManager.checkForOfficialPlugins("lang-python");
+PluginManager.checkForOfficialPlugins("mapper-murmur3");
+PluginManager.checkForOfficialPlugins("mapper-size");

 try {
 PluginManager.checkForOfficialPlugins("elasticsearch-mapper-attachment");
@@ -62,7 +62,7 @@ public class PluginManagerUnitTests extends ESTestCase {
 .build();
 Environment environment = new Environment(settings);

-PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user", "repo");
+PluginManager.PluginHandle pluginHandle = new PluginManager.PluginHandle(pluginName, "version", "user");
 String configDirPath = Files.simplifyPath(pluginHandle.configDir(environment).normalize().toString());
 String expectedDirPath = Files.simplifyPath(genericConfigFolder.resolve(pluginName).normalize().toString());

@@ -82,23 +82,23 @@ public class PluginManagerUnitTests extends ESTestCase {
 Iterator<URL> iterator = handle.urls().iterator();

 if (supportStagingUrls) {
-String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip",
-Build.CURRENT.hashShort(), pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
-assertThat(iterator.next(), is(new URL(expectedStagingURL)));
+String expectedStagingURL = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
+Version.CURRENT.number(), Build.CURRENT.hashShort(), pluginName, Version.CURRENT.number(), pluginName, Version.CURRENT.number());
+assertThat(iterator.next().toExternalForm(), is(expectedStagingURL));
 }

-URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-" + pluginName + "/" + Version.CURRENT.number() + "/elasticsearch-" +
+URL expected = new URL("http", "download.elastic.co", "/elasticsearch/release/org/elasticsearch/plugin/" + pluginName + "/" + Version.CURRENT.number() + "/" +
 pluginName + "-" + Version.CURRENT.number() + ".zip");
-assertThat(iterator.next(), is(expected));
+assertThat(iterator.next().toExternalForm(), is(expected.toExternalForm()));

 assertThat(iterator.hasNext(), is(false));
 }

 @Test
-public void testTrimmingElasticsearchFromOfficialPluginName() throws IOException {
-String randomPluginName = randomFrom(PluginManager.OFFICIAL_PLUGINS.asList()).replaceFirst("elasticsearch-", "");
+public void testOfficialPluginName() throws IOException {
+String randomPluginName = randomFrom(PluginManager.OFFICIAL_PLUGINS.asList());
 PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(randomPluginName);
-assertThat(handle.name, is(randomPluginName.replaceAll("^elasticsearch-", "")));
+assertThat(handle.name, is(randomPluginName));

 boolean supportStagingUrls = randomBoolean();
 if (supportStagingUrls) {
@@ -108,28 +108,26 @@ public class PluginManagerUnitTests extends ESTestCase {
 Iterator<URL> iterator = handle.urls().iterator();

 if (supportStagingUrls) {
-String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip",
-Build.CURRENT.hashShort(), randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
-assertThat(iterator.next(), is(new URL(expectedStagingUrl)));
+String expectedStagingUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/staging/%s-%s/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
+Version.CURRENT.number(), Build.CURRENT.hashShort(), randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
+assertThat(iterator.next().toExternalForm(), is(expectedStagingUrl));
 }

-String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/elasticsearch-%s/%s/elasticsearch-%s-%s.zip",
+String releaseUrl = String.format(Locale.ROOT, "http://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%s/%s/%s-%s.zip",
 randomPluginName, Version.CURRENT.number(), randomPluginName, Version.CURRENT.number());
-assertThat(iterator.next(), is(new URL(releaseUrl)));
+assertThat(iterator.next().toExternalForm(), is(releaseUrl));

 assertThat(iterator.hasNext(), is(false));
 }

 @Test
-public void testTrimmingElasticsearchFromGithubPluginName() throws IOException {
+public void testGithubPluginName() throws IOException {
 String user = randomAsciiOfLength(6);
-String randomName = randomAsciiOfLength(10);
-String pluginName = randomFrom("elasticsearch-", "es-") + randomName;
+String pluginName = randomAsciiOfLength(10);
 PluginManager.PluginHandle handle = PluginManager.PluginHandle.parse(user + "/" + pluginName);
-assertThat(handle.name, is(randomName));
+assertThat(handle.name, is(pluginName));
 assertThat(handle.urls(), hasSize(1));
-URL expected = new URL("https", "github.com", "/" + user + "/" + pluginName + "/" + "archive/master.zip");
-assertThat(handle.urls().get(0), is(expected));
+assertThat(handle.urls().get(0).toExternalForm(), is(new URL("https", "github.com", "/" + user + "/" + pluginName + "/" + "archive/master.zip").toExternalForm()));
 }

 @Test
@@ -402,7 +402,7 @@ public class RelocationIT extends ESIntegTestCase {

 // Slow down recovery in order to make recovery cancellations more likely
 IndicesStatsResponse statsResponse = client().admin().indices().prepareStats(indexName).get();
-long chunkSize = statsResponse.getIndex(indexName).getShards()[0].getStats().getStore().size().bytes() / 10;
+long chunkSize = Math.max(1, statsResponse.getIndex(indexName).getShards()[0].getStats().getStore().size().bytes() / 10);
 assertTrue(client().admin().cluster().prepareUpdateSettings()
 .setTransientSettings(Settings.builder()
 // one chunk per sec..
@@ -61,54 +61,23 @@ public class CardinalityIT extends ESIntegTestCase {
 jsonBuilder().startObject().startObject("type").startObject("properties")
 .startObject("str_value")
 .field("type", "string")
-.startObject("fields")
-.startObject("hash")
-.field("type", "murmur3")
-.endObject()
-.endObject()
 .endObject()
 .startObject("str_values")
 .field("type", "string")
-.startObject("fields")
-.startObject("hash")
-.field("type", "murmur3")
-.endObject()
-.endObject()
 .endObject()
 .startObject("l_value")
 .field("type", "long")
-.startObject("fields")
-.startObject("hash")
-.field("type", "murmur3")
-.endObject()
-.endObject()
 .endObject()
 .startObject("l_values")
 .field("type", "long")
-.startObject("fields")
-.startObject("hash")
-.field("type", "murmur3")
-.endObject()
-.endObject()
 .endObject()
 .startObject("d_value")
 .field("type", "double")
-.startObject("fields")
-.startObject("hash")
-.field("type", "murmur3")
-.endObject()
-.endObject()
-.endObject()
-.startObject("d_values")
-.field("type", "double")
-.startObject("fields")
-.startObject("hash")
-.field("type", "murmur3")
-.endObject()
-.endObject()
-.endObject()
-.endObject()
-.endObject().endObject()).execute().actionGet();
+.endObject()
+.startObject("d_values")
+.field("type", "double")
+.endObject()
+.endObject().endObject().endObject()).execute().actionGet();

 numDocs = randomIntBetween(2, 100);
 precisionThreshold = randomIntBetween(0, 1 << randomInt(20));
@@ -145,12 +114,12 @@ public class CardinalityIT extends ESIntegTestCase {
 assertThat(count.getValue(), greaterThan(0L));
 }
 }
-private String singleNumericField(boolean hash) {
-return (randomBoolean() ? "l_value" : "d_value") + (hash ? ".hash" : "");
+private String singleNumericField() {
+return randomBoolean() ? "l_value" : "d_value";
 }

 private String multiNumericField(boolean hash) {
-return (randomBoolean() ? "l_values" : "d_values") + (hash ? ".hash" : "");
+return randomBoolean() ? "l_values" : "d_values";
 }

 @Test
@@ -195,24 +164,10 @@ public class CardinalityIT extends ESIntegTestCase {
 assertCount(count, numDocs);
 }

-@Test
-public void singleValuedStringHashed() throws Exception {
-SearchResponse response = client().prepareSearch("idx").setTypes("type")
-.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value.hash"))
-.execute().actionGet();
-
-assertSearchResponse(response);
-
-Cardinality count = response.getAggregations().get("cardinality");
-assertThat(count, notNullValue());
-assertThat(count.getName(), equalTo("cardinality"));
-assertCount(count, numDocs);
-}
-
 @Test
 public void singleValuedNumeric() throws Exception {
 SearchResponse response = client().prepareSearch("idx").setTypes("type")
-.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField(false)))
+.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))
 .execute().actionGet();

 assertSearchResponse(response);
@@ -229,7 +184,7 @@ public class CardinalityIT extends ESIntegTestCase {
 SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
 .addAggregation(
 global("global").subAggregation(
-cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField(false))))
+cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())))
 .execute().actionGet();

 assertSearchResponse(searchResponse);
@@ -254,7 +209,7 @@ public class CardinalityIT extends ESIntegTestCase {
 @Test
 public void singleValuedNumericHashed() throws Exception {
 SearchResponse response = client().prepareSearch("idx").setTypes("type")
-.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField(true)))
+.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))
 .execute().actionGet();

 assertSearchResponse(response);
@@ -279,20 +234,6 @@ public class CardinalityIT extends ESIntegTestCase {
 assertCount(count, numDocs * 2);
 }

-@Test
-public void multiValuedStringHashed() throws Exception {
-SearchResponse response = client().prepareSearch("idx").setTypes("type")
-.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values.hash"))
-.execute().actionGet();
-
-assertSearchResponse(response);
-
-Cardinality count = response.getAggregations().get("cardinality");
-assertThat(count, notNullValue());
-assertThat(count.getName(), equalTo("cardinality"));
-assertCount(count, numDocs * 2);
-}
-
 @Test
 public void multiValuedNumeric() throws Exception {
 SearchResponse response = client().prepareSearch("idx").setTypes("type")
@@ -356,7 +297,7 @@ public class CardinalityIT extends ESIntegTestCase {
 SearchResponse response = client().prepareSearch("idx").setTypes("type")
 .addAggregation(
 cardinality("cardinality").precisionThreshold(precisionThreshold).script(
-new Script("doc['" + singleNumericField(false) + "'].value")))
+new Script("doc['" + singleNumericField() + "'].value")))
 .execute().actionGet();

 assertSearchResponse(response);
@@ -417,7 +358,7 @@ public class CardinalityIT extends ESIntegTestCase {
 public void singleValuedNumericValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx").setTypes("type")
 .addAggregation(
-cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField(false))
+cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField())
 .script(new Script("_value")))
 .execute().actionGet();

@@ -464,23 +405,4 @@ public class CardinalityIT extends ESIntegTestCase {
 }
 }

-@Test
-public void asSubAggHashed() throws Exception {
-SearchResponse response = client().prepareSearch("idx").setTypes("type")
-.addAggregation(terms("terms").field("str_value")
-.collectMode(randomFrom(SubAggCollectionMode.values()))
-.subAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values.hash")))
-.execute().actionGet();
-
-assertSearchResponse(response);
-
-Terms terms = response.getAggregations().get("terms");
-for (Terms.Bucket bucket : terms.getBuckets()) {
-Cardinality count = bucket.getAggregations().get("cardinality");
-assertThat(count, notNullValue());
-assertThat(count.getName(), equalTo("cardinality"));
-assertCount(count, 2);
-}
-}
-
 }
@@ -574,7 +574,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
 "type",
 jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string")
 .endObject().startObject("date").field("type", "date").endObject().startObject("num").field("type", "double")
-.endObject().startObject("geo").field("type", "geo_point").endObject().endObject().endObject().endObject()));
+.endObject().startObject("geo").field("type", "geo_point").field("coerce", true).endObject().endObject()
+.endObject().endObject()));
 ensureYellow();
 int numDocs = 200;
 List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
@@ -289,50 +289,50 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(50, -180).bottomRight(-50, 180))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(50, -180).bottomRight(-50, 180).type("indexed"))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180).type("indexed"))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(90, -180).bottomRight(-90, 180))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(90, -180).bottomRight(-90, 180).type("indexed"))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180).type("indexed"))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(2l));

        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(50, 0).bottomRight(-50, 360))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(50, 0).bottomRight(-50, 360).type("indexed"))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360).type("indexed"))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(90, 0).bottomRight(-90, 360))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
        searchResponse = client().prepareSearch()
                .setQuery(
                        filteredQuery(matchAllQuery(),
-                               geoBoundingBoxQuery("location").topLeft(90, 0).bottomRight(-90, 360).type("indexed"))
+                               geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360).type("indexed"))
                ).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
    }
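Every bounding-box query in this test now calls @coerce(true)@ on the builder. Presumably (an assumption, not stated in the diff) this is what allows corners such as longitude 360 in the later assertions to be treated as valid, normalized coordinates. The recurring pattern, using only the builder calls visible above:

<pre>
// Minimal sketch of the pattern used throughout this test: a match_all query
// filtered by a coerced bounding box whose right edge (longitude 360) lies
// outside the canonical [-180, 180] range.
SearchResponse response = client().prepareSearch()
        .setQuery(filteredQuery(matchAllQuery(),
                geoBoundingBoxQuery("location").coerce(true)
                        .topLeft(50, 0)
                        .bottomRight(-50, 360)))
        .execute().actionGet();
</pre>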
@@ -221,8 +221,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
    public void testDistanceSortingMVFields() throws Exception {
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
-               .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
-               .endObject().endObject();
+               .field("ignore_malformed", true).field("coerce", true).startObject("fielddata")
+               .field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject().endObject().endObject();
        assertAcked(prepareCreate("test")
                .addMapping("type1", xContentBuilder));
        ensureGreen();
@@ -233,6 +233,11 @@ public class GeoDistanceIT extends ESIntegTestCase {
                .endObject()).execute().actionGet();

        client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
+               .field("names", "New York 2")
+               .startObject("locations").field("lat", 400.7143528).field("lon", 285.9990269).endObject()
+               .endObject()).execute().actionGet();
+
+       client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
                .field("names", "Times Square", "Tribeca")
                .startArray("locations")
                // to NY: 5.286 km
@@ -242,7 +247,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
                .endArray()
                .endObject()).execute().actionGet();

-       client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
+       client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
                .field("names", "Wall Street", "Soho")
                .startArray("locations")
                // to NY: 1.055 km
@@ -253,7 +258,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
                .endObject()).execute().actionGet();


-       client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
+       client().prepareIndex("test", "type1", "5").setSource(jsonBuilder().startObject()
                .field("names", "Greenwich Village", "Brooklyn")
                .startArray("locations")
                // to NY: 2.029 km
@@ -270,70 +275,76 @@ public class GeoDistanceIT extends ESIntegTestCase {
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC))
                .execute().actionGet();

-       assertHitCount(searchResponse, 4);
-       assertOrderedSearchHits(searchResponse, "1", "2", "3", "4");
+       assertHitCount(searchResponse, 5);
+       assertOrderedSearchHits(searchResponse, "1", "2", "3", "4", "5");
        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));

        // Order: Asc, Mode: max
        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max"))
                .execute().actionGet();

-       assertHitCount(searchResponse, 4);
-       assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
+       assertHitCount(searchResponse, 5);
+       assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5");
        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));

        // Order: Desc
        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC))
                .execute().actionGet();

-       assertHitCount(searchResponse, 4);
-       assertOrderedSearchHits(searchResponse, "4", "2", "3", "1");
+       assertHitCount(searchResponse, 5);
+       assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1");
        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

        // Order: Desc, Mode: min
        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min"))
                .execute().actionGet();

-       assertHitCount(searchResponse, 4);
-       assertOrderedSearchHits(searchResponse, "4", "3", "2", "1");
+       assertHitCount(searchResponse, 5);
+       assertOrderedSearchHits(searchResponse, "5", "4", "3", "2", "1");
        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
                .execute().actionGet();

-       assertHitCount(searchResponse, 4);
-       assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
+       assertHitCount(searchResponse, 5);
+       assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5");
        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1157d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2874d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2874d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));

        searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC))
                .execute().actionGet();

-       assertHitCount(searchResponse, 4);
-       assertOrderedSearchHits(searchResponse, "4", "2", "3", "1");
+       assertHitCount(searchResponse, 5);
+       assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1");
        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d));
        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d));
        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d));
-       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+       assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

        assertFailures(client().prepareSearch("test").setQuery(matchAllQuery())
                .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("sum")),
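The renumbered hits and the new 421.2 m expectations follow from the document added earlier in this file: assuming @coerce@ normalizes the out-of-range point by wrapping each coordinate by 360 degrees, (400.7143528, 285.9990269) becomes (40.7143528, -74.0009731), the same latitude as the sort origin but 0.005 degrees further east. A back-of-the-envelope check (not part of the change set):

<pre>
// Rough planar check of the new closeTo(421.2d, 10d) expectations, assuming the
// indexed point is normalized by subtracting 360 from each coordinate.
public class CoercedDistanceCheck {
    public static void main(String[] args) {
        double originLat = 40.7143528, originLon = -74.0059731;        // sort origin used in the test
        double docLat = 400.7143528 - 360, docLon = 285.9990269 - 360; // normalized doc "2"
        double metersPerDegree = 111_320d;                             // ~length of one degree of latitude
        double dLat = (docLat - originLat) * metersPerDegree;
        double dLon = (docLon - originLon) * metersPerDegree * Math.cos(Math.toRadians(originLat));
        System.out.printf("distance = %.1f m%n", Math.hypot(dLat, dLon)); // prints roughly 420 m
    }
}
</pre>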
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.internal;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.InputStreamStreamInput;
+import org.elasticsearch.common.text.StringText;
+import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+public class InternalSearchHitTests extends ESTestCase {
+
+    public void testSerializeShardTarget() throws Exception {
+        SearchShardTarget target = new SearchShardTarget("_node_id", "_index", 0);
+
+        Map<String, InternalSearchHits> innerHits = new HashMap<>();
+        InternalSearchHit innerHit1 = new InternalSearchHit(0, "_id", new StringText("_type"), null);
+        innerHit1.shardTarget(target);
+        InternalSearchHit innerInnerHit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null);
+        innerInnerHit2.shardTarget(target);
+        innerHits.put("1", new InternalSearchHits(new InternalSearchHit[]{innerInnerHit2}, 1, 1f));
+        innerHit1.setInnerHits(innerHits);
+        InternalSearchHit innerHit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null);
+        innerHit2.shardTarget(target);
+        InternalSearchHit innerHit3 = new InternalSearchHit(0, "_id", new StringText("_type"), null);
+        innerHit3.shardTarget(target);
+
+        innerHits = new HashMap<>();
+        InternalSearchHit hit1 = new InternalSearchHit(0, "_id", new StringText("_type"), null);
+        innerHits.put("1", new InternalSearchHits(new InternalSearchHit[]{innerHit1, innerHit2}, 1, 1f));
+        innerHits.put("2", new InternalSearchHits(new InternalSearchHit[]{innerHit3}, 1, 1f));
+        hit1.shardTarget(target);
+        hit1.setInnerHits(innerHits);
+
+        InternalSearchHit hit2 = new InternalSearchHit(0, "_id", new StringText("_type"), null);
+        hit2.shardTarget(target);
+
+        InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[]{hit1, hit2}, 2, 1f);
+
+        InternalSearchHits.StreamContext context = new InternalSearchHits.StreamContext();
+        context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM);
+        BytesStreamOutput output = new BytesStreamOutput();
+        hits.writeTo(output, context);
+        InputStream input = new ByteArrayInputStream(output.bytes().toBytes());
+        context = new InternalSearchHits.StreamContext();
+        context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM);
+        InternalSearchHits results = InternalSearchHits.readSearchHits(new InputStreamStreamInput(input), context);
+        assertThat(results.getAt(0).shard(), equalTo(target));
+        assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).shard(), nullValue());
+        assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).shard(), nullValue());
+        assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).shard(), nullValue());
+        assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).shard(), nullValue());
+        assertThat(results.getAt(1).shard(), equalTo(target));
+    }
+
+}