merged master

Michael McCandless 2015-12-15 05:07:19 -05:00 committed by mikemccand
commit 27d8509f0e
2957 changed files with 97602 additions and 84875 deletions

.dir-locals.el Normal file

@ -0,0 +1,85 @@
((java-mode
.
((eval
.
(progn
(defun my/point-in-defun-declaration-p ()
(let ((bod (save-excursion (c-beginning-of-defun)
(point))))
(<= bod
(point)
(save-excursion (goto-char bod)
(re-search-forward "{")
(point)))))
(defun my/is-string-concatenation-p ()
"Returns true if the previous line is a string concatenation"
(save-excursion
(let ((start (point)))
(forward-line -1)
(if (re-search-forward " \\\+$" start t) t nil))))
(defun my/inside-java-lambda-p ()
"Returns true if point is the first statement inside of a lambda"
(save-excursion
(c-beginning-of-statement-1)
(let ((start (point)))
(forward-line -1)
(if (search-forward " -> {" start t) t nil))))
(defun my/trailing-paren-p ()
"Returns true if point is a training paren and semicolon"
(save-excursion
(end-of-line)
(let ((endpoint (point)))
(beginning-of-line)
(if (re-search-forward "[ ]*);$" endpoint t) t nil))))
(defun my/prev-line-call-with-no-args-p ()
"Return true if the previous line is a function call with no arguments"
(save-excursion
(let ((start (point)))
(forward-line -1)
(if (re-search-forward ".($" start t) t nil))))
(defun my/arglist-cont-nonempty-indentation (arg)
(if (my/inside-java-lambda-p)
'+
(if (my/is-string-concatenation-p)
16
(unless (my/point-in-defun-declaration-p) '++))))
(defun my/statement-block-intro (arg)
(if (and (c-at-statement-start-p) (my/inside-java-lambda-p)) 0 '+))
(defun my/block-close (arg)
(if (my/inside-java-lambda-p) '- 0))
(defun my/arglist-close (arg) (if (my/trailing-paren-p) 0 '--))
(defun my/arglist-intro (arg)
(if (my/prev-line-call-with-no-args-p) '++ 0))
(c-set-offset 'inline-open 0)
(c-set-offset 'topmost-intro-cont '+)
(c-set-offset 'statement-block-intro 'my/statement-block-intro)
(c-set-offset 'block-close 'my/block-close)
(c-set-offset 'knr-argdecl-intro '+)
(c-set-offset 'substatement-open '+)
(c-set-offset 'substatement-label '+)
(c-set-offset 'case-label '+)
(c-set-offset 'label '+)
(c-set-offset 'statement-case-open '+)
(c-set-offset 'statement-cont '++)
(c-set-offset 'arglist-intro 'my/arglist-intro)
(c-set-offset 'arglist-cont-nonempty '(my/arglist-cont-nonempty-indentation c-lineup-arglist))
(c-set-offset 'arglist-close 'my/arglist-close)
(c-set-offset 'inexpr-class 0)
(c-set-offset 'access-label 0)
(c-set-offset 'inher-intro '++)
(c-set-offset 'inher-cont '++)
(c-set-offset 'brace-list-intro '+)
(c-set-offset 'func-decl-cont '++)
))
(c-basic-offset . 4)
(c-comment-only-line-offset . (0 . 0)))))

.editorconfig Normal file

@ -0,0 +1,10 @@
# EditorConfig: http://editorconfig.org/
root = true
[*.java]
charset = utf-8
indent_style = space
indent_size = 4
trim_trailing_whitespace = true
insert_final_newline = true

.gitignore vendored

@ -8,8 +8,8 @@ work/
logs/
.DS_Store
build/
target/
*-execution-hints.log
generated-resources/
**/.local*
docs/html/
docs/build.log
/tmp/
@ -31,3 +31,7 @@ nb-configuration.xml
nbactions.xml
dependency-reduced-pom.xml
# old patterns specific to maven
*-execution-hints.log
target/

.projectile Normal file

@ -0,0 +1,32 @@
-/target
-/core/target
-/qa/target
-/rest-api-spec/target
-/test-framework/target
-/plugins/target
-/plugins/analysis-icu/target
-/plugins/analysis-kuromoji/target
-/plugins/analysis-phonetic/target
-/plugins/analysis-smartcn/target
-/plugins/analysis-stempel/target
-/plugins/cloud-aws/target
-/plugins/cloud-azure/target
-/plugins/cloud-gce/target
-/plugins/delete-by-query/target
-/plugins/discovery-azure/target
-/plugins/discovery-ec2/target
-/plugins/discovery-gce/target
-/plugins/discovery-multicast/target
-/plugins/jvm-example/target
-/plugins/lang-expression/target
-/plugins/lang-groovy/target
-/plugins/lang-javascript/target
-/plugins/lang-python/target
-/plugins/mapper-murmur3/target
-/plugins/mapper-size/target
-/plugins/repository-azure/target
-/plugins/repository-s3/target
-/plugins/site-example/target
-/plugins/store-smb/target
-/plugins/target
-*.class


@ -1,18 +0,0 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=enabled
org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
org.eclipse.jdt.core.compiler.annotation.nullable=org.elasticsearch.common.Nullable
org.eclipse.jdt.core.compiler.annotation.nullanalysis=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=warning
org.eclipse.jdt.core.compiler.problem.nullReference=warning
org.eclipse.jdt.core.compiler.problem.nullSpecViolation=warning
org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
org.eclipse.jdt.core.compiler.source=1.7
org.eclipse.jdt.core.formatter.lineSplit=140
org.eclipse.jdt.core.formatter.tabulation.char=space
org.eclipse.jdt.core.formatter.tabulation.size=4


@ -76,9 +76,7 @@ Contributing to the Elasticsearch codebase
**Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch)
Make sure you have [Maven](http://maven.apache.org) installed, as Elasticsearch uses it as its build system. Integration with IntelliJ and Eclipse should work out of the box. Eclipse users can automatically configure their IDE by running `mvn eclipse:eclipse` and then importing the project into their workspace: `File > Import > Existing project into workspace` and make sure to select `Search for nested projects...` option as Elasticsearch is a multi-module maven project. Additionally you will want to ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini` accordingly to avoid GC overhead errors. Please make sure the [m2e-connector](http://marketplace.eclipse.org/content/m2e-connector-maven-dependency-plugin) is not installed in your Eclipse distribution as it will interfere with setup performed by `mvn eclipse:eclipse`.
Elasticsearch also works perfectly with Eclipse's [m2e](http://www.eclipse.org/m2e/). Once you've installed m2e you can import Elasticsearch as an `Existing Maven Project`.
Make sure you have [Gradle](http://gradle.org) installed, as Elasticsearch uses it as its build system. Integration with IntelliJ and Eclipse should work out of the box. Eclipse users can automatically configure their IDE: `gradle eclipse` then `File: Import: Existing Projects into Workspace`. Select the option `Search for nested projects`. Additionally you will want to ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini` accordingly to avoid GC overhead errors.
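For example (illustrative values, assuming the usual `eclipse.ini` layout where JVM options follow the `-vmargs` marker):

```
-vmargs
-Xms512m
-Xmx2048m
```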
Please follow these formatting guidelines:
@ -92,15 +90,15 @@ To create a distribution from the source, simply run:
```sh
cd elasticsearch/
mvn clean package -DskipTests
gradle assemble
```
You will find the newly built packages under: `./target/releases/`.
You will find the newly built packages under: `./distribution/build/distributions/`.
Before submitting your changes, run the test suite to make sure that nothing is broken, with:
```sh
mvn clean test -Dtests.slow=true
gradle check
```
Source: [Contributing to elasticsearch](https://www.elastic.co/contributing-to-elasticsearch/)

GRADLE.CHEATSHEET Normal file

@ -0,0 +1,7 @@
As a quick helper, below are the equivalent commands from maven to gradle (TESTING.md has also been updated). You can also run "gradle tasks" to see all tasks that are available to run.
clean -> clean
test -> test
verify -> check
verify -Dskip.unit.tests -> integTest
package -DskipTests -> assemble
install -DskipTests -> install


@ -200,19 +200,22 @@ We have just covered a very small portion of what Elasticsearch is all about. Fo
h3. Building from Source
Elasticsearch uses "Maven":http://maven.apache.org for its build system.
Elasticsearch uses "Gradle":http://gradle.org for its build system. You'll need to have a modern version of Gradle installed - 2.8 should do.
In order to create a distribution, simply run the @mvn clean package
-DskipTests@ command in the cloned directory.
In order to create a distribution, simply run the @gradle build@ command in the cloned directory.
The distribution for each project will be created under the @target/releases@ directory in that project.
See the "TESTING":TESTING.asciidoc file for more information about
running the Elasticsearch test suite.
h3. Upgrading to Elasticsearch 1.x?
h3. Upgrading from Elasticsearch 1.x?
In order to ensure a smooth upgrade process from earlier versions of Elasticsearch (< 1.0.0), it is recommended to perform a full cluster restart. Please see the "setup reference":https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html for more details on the upgrade process.
In order to ensure a smooth upgrade process from earlier versions of
Elasticsearch (1.x), it is required to perform a full cluster restart. Please
see the "setup reference":
https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html
for more details on the upgrade process.
h1. License


@ -13,7 +13,7 @@ To create a distribution without running the tests, simply run the
following:
-----------------------------
mvn clean package -DskipTests
gradle assemble
-----------------------------
== Other test options
@ -35,7 +35,7 @@ Use local transport (default since 1.3):
Alternatively, you can set the `ES_TEST_LOCAL` environment variable:
-------------------------------------
export ES_TEST_LOCAL=true && mvn test
export ES_TEST_LOCAL=true && gradle test
-------------------------------------
=== Running Elasticsearch from a checkout
@ -44,7 +44,7 @@ In order to run Elasticsearch from source without building a package, you can
run it using Maven:
-------------------------------------
./run.sh
gradle run
-------------------------------------
=== Test case filtering.
@ -55,20 +55,20 @@ run it using Maven:
Run a single test case (variants)
----------------------------------------------------------
mvn test -Dtests.class=org.elasticsearch.package.ClassName
mvn test "-Dtests.class=*.ClassName"
gradle test -Dtests.class=org.elasticsearch.package.ClassName
gradle test "-Dtests.class=*.ClassName"
----------------------------------------------------------
Run all tests in a package and sub-packages
----------------------------------------------------
mvn test "-Dtests.class=org.elasticsearch.package.*"
gradle test "-Dtests.class=org.elasticsearch.package.*"
----------------------------------------------------
Run any test methods that contain 'esi' (like: ...r*esi*ze...).
-------------------------------
mvn test "-Dtests.method=*esi*"
gradle test "-Dtests.method=*esi*"
-------------------------------
You can also filter tests by certain annotations ie:
@ -81,7 +81,7 @@ You can also filter tests by certain annotations ie:
Those annotation names can be combined into a filter expression like:
------------------------------------------------
mvn test -Dtests.filter="@nightly and not @backwards"
gradle test -Dtests.filter="@nightly and not @backwards"
------------------------------------------------
to run all nightly tests but not the ones that are backwards tests. `tests.filter` supports
@ -89,7 +89,7 @@ the boolean operators `and, or, not` and grouping ie:
---------------------------------------------------------------
mvn test -Dtests.filter="@nightly and not(@badapple or @backwards)"
gradle test -Dtests.filter="@nightly and not(@badapple or @backwards)"
---------------------------------------------------------------
=== Seed and repetitions.
@ -97,7 +97,7 @@ mvn test -Dtests.filter="@nightly and not(@badapple or @backwards)"
Run with a given seed (seed is a hex-encoded long).
------------------------------
mvn test -Dtests.seed=DEADBEEF
gradle test -Dtests.seed=DEADBEEF
------------------------------
=== Repeats _all_ tests of ClassName N times.
@ -106,7 +106,7 @@ Every test repetition will have a different method seed
(derived from a single random master seed).
--------------------------------------------------
mvn test -Dtests.iters=N -Dtests.class=*.ClassName
gradle test -Dtests.iters=N -Dtests.class=*.ClassName
--------------------------------------------------
=== Repeats _all_ tests of ClassName N times.
@ -115,7 +115,7 @@ Every test repetition will have exactly the same master (0xdead) and
method-level (0xbeef) seed.
------------------------------------------------------------------------
mvn test -Dtests.iters=N -Dtests.class=*.ClassName -Dtests.seed=DEAD:BEEF
gradle test -Dtests.iters=N -Dtests.class=*.ClassName -Dtests.seed=DEAD:BEEF
------------------------------------------------------------------------
=== Repeats a given test N times
@ -125,14 +125,14 @@ ie: testFoo[0], testFoo[1], etc... so using testmethod or tests.method
ending in a glob is necessary to ensure iterations are run).
-------------------------------------------------------------------------
mvn test -Dtests.iters=N -Dtests.class=*.ClassName -Dtests.method=mytest*
gradle test -Dtests.iters=N -Dtests.class=*.ClassName -Dtests.method=mytest*
-------------------------------------------------------------------------
Repeats N times but skips any tests after the first failure or M initial failures.
-------------------------------------------------------------
mvn test -Dtests.iters=N -Dtests.failfast=true -Dtestcase=...
mvn test -Dtests.iters=N -Dtests.maxfailures=M -Dtestcase=...
gradle test -Dtests.iters=N -Dtests.failfast=true -Dtestcase=...
gradle test -Dtests.iters=N -Dtests.maxfailures=M -Dtestcase=...
-------------------------------------------------------------
=== Test groups.
@ -142,24 +142,30 @@ Test groups can be enabled or disabled (true/false).
Default value provided below in [brackets].
------------------------------------------------------------------
mvn test -Dtests.nightly=[false] - nightly test group (@Nightly)
mvn test -Dtests.weekly=[false] - weekly tests (@Weekly)
mvn test -Dtests.awaitsfix=[false] - known issue (@AwaitsFix)
gradle test -Dtests.nightly=[false] - nightly test group (@Nightly)
gradle test -Dtests.weekly=[false] - weekly tests (@Weekly)
gradle test -Dtests.awaitsfix=[false] - known issue (@AwaitsFix)
------------------------------------------------------------------
=== Load balancing and caches.
By default, the tests run sequentially on a single forked JVM.
To run with more forked JVMs than the default use:
By default the tests run on up to 4 JVMs based on the number of cores. If you
want to explicitly specify the number of JVMs you can do so on the command
line:
----------------------------
mvn test -Dtests.jvms=8 test
gradle test -Dtests.jvms=8
----------------------------
Don't count hypercores for CPU-intense tests and leave some slack
for JVM-internal threads (like the garbage collector). Make sure there is
enough RAM to handle child JVMs.
Or in `~/.gradle/gradle.properties`:
----------------------------
systemProp.tests.jvms=8
----------------------------
It's difficult to pick the "right" number here. Hypercores don't count for
CPU-intensive tests and you should leave some slack for JVM-internal threads
like the garbage collector. And you have to have enough RAM to handle each JVM.
=== Test compatibility.
@ -167,7 +173,7 @@ It is possible to provide a version that allows adapting the tests behaviour
to older features or bugs that have been changed or fixed in the meantime.
-----------------------------------------
mvn test -Dtests.compatibility=1.0.0
gradle test -Dtests.compatibility=1.0.0
-----------------------------------------
@ -176,45 +182,50 @@ mvn test -Dtests.compatibility=1.0.0
Run all tests without stopping on errors (inspect log files).
-----------------------------------------
mvn test -Dtests.haltonfailure=false test
gradle test -Dtests.haltonfailure=false
-----------------------------------------
Run more verbose output (slave JVM parameters, etc.).
----------------------
mvn test -verbose test
gradle test -verbose
----------------------
Change the default suite timeout to 5 seconds for all
tests (note the exclamation mark).
---------------------------------------
mvn test -Dtests.timeoutSuite=5000! ...
gradle test -Dtests.timeoutSuite=5000! ...
---------------------------------------
Change the logging level of ES (not mvn)
Change the logging level of ES (not gradle)
--------------------------------
mvn test -Des.logger.level=DEBUG
gradle test -Des.logger.level=DEBUG
--------------------------------
Print all the logging output from the test runs to the commandline
even if tests are passing.
------------------------------
mvn test -Dtests.output=always
gradle test -Dtests.output=always
------------------------------
Configure the heap size.
------------------------------
mvn test -Dtests.heap.size=512m
gradle test -Dtests.heap.size=512m
------------------------------
Pass arbitrary jvm arguments.
------------------------------
mvn test -Dtests.jvm.argline="-XX:HeapDumpPath=/path/to/heapdumps"
# specify heap dump path
gradle test -Dtests.jvm.argline="-XX:HeapDumpPath=/path/to/heapdumps"
# enable gc logging
gradle test -Dtests.jvm.argline="-verbose:gc"
# enable security debugging
gradle test -Dtests.jvm.argline="-Djava.security.debug=access,failure"
------------------------------
== Backwards Compatibility Tests
@ -225,7 +236,7 @@ To run backwards compatibility tests untar or unzip a release and run the tests
with the following command:
---------------------------------------------------------------------------
mvn test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.bwc.path=/path/to/elasticsearch -Dtests.security.manager=false
gradle test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.bwc.path=/path/to/elasticsearch -Dtests.security.manager=false
---------------------------------------------------------------------------
Note that backwards tests must be run with security manager disabled.
@ -233,7 +244,7 @@ If the elasticsearch release is placed under `./backwards/elasticsearch-x.y.z` t
can be omitted:
---------------------------------------------------------------------------
mvn test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.security.manager=false
gradle test -Dtests.filter="@backwards" -Dtests.bwc.version=x.y.z -Dtests.security.manager=false
---------------------------------------------------------------------------
To set up the bwc test environment execute the following steps (provided you are
@ -245,19 +256,25 @@ $ curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elastic
$ tar -xzf elasticsearch-1.2.1.tar.gz
---------------------------------------------------------------------------
== Running integration tests
== Running verification tasks
To run the integration tests:
To run all verification tasks, including static checks, unit tests, and integration tests:
---------------------------------------------------------------------------
mvn verify
gradle check
---------------------------------------------------------------------------
Note that this will also run the unit tests first. If you want to just
run the integration tests only (because you are debugging them):
Note that this will also run the unit tests and precommit tasks first. If you want to just
run the integration tests (because you are debugging them):
---------------------------------------------------------------------------
mvn verify -Dskip.unit.tests
gradle integTest
---------------------------------------------------------------------------
If you want to just run the precommit checks:
---------------------------------------------------------------------------
gradle precommit
---------------------------------------------------------------------------
== Testing the REST layer
@ -269,11 +286,20 @@ The REST layer is tested through specific tests that are shared between all
the elasticsearch official clients and consist of YAML files that describe the
operations to be executed and the obtained results that need to be tested.
The REST tests are run automatically when executing the maven test command. To run only the
The REST tests are run automatically when executing the "gradle check" command. To run only the
REST tests use the following command:
---------------------------------------------------------------------------
mvn verify -Dtests.filter="@Rest"
gradle :distribution:tar:integTest \
-Dtests.class=org.elasticsearch.test.rest.RestIT
---------------------------------------------------------------------------
A specific test case can be run with
---------------------------------------------------------------------------
gradle :distribution:tar:integTest \
-Dtests.class=org.elasticsearch.test.rest.RestIT \
-Dtests.method="test {p0=cat.shards/10_basic/Help}"
---------------------------------------------------------------------------
`RestNIT` are the executable test classes that run all the
@ -298,20 +324,6 @@ comma separated list of nodes to connect to (e.g. localhost:9300). A transport c
be created based on that and used for all the before|after test operations, and to extract
the http addresses of the nodes so that REST requests can be sent to them.
== Skip validate
To disable validation step (forbidden API or `// NOCOMMIT`) use
---------------------------------------------------------------------------
mvn test -Dvalidate.skip=true
---------------------------------------------------------------------------
You can also skip this by using the "dev" profile:
---------------------------------------------------------------------------
mvn test -Pdev
---------------------------------------------------------------------------
== Testing scripts
The simplest way to test scripts and the packaged distributions is to use
@ -329,152 +341,63 @@ vagrant plugin install vagrant-cachier
. Validate your installed dependencies:
-------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant validate
gradle :qa:vagrant:checkVagrantVersion
-------------------------------------
. Download the VMs. Since Maven or ant or something eats the progress reports
from Vagrant when you run it inside mvn its probably best if you run this one
time to setup all the VMs one at a time. Run this to download and setup the VMs
we use for testing by default:
. Download and smoke test the VMs with `gradle vagrantSmokeTest` or
`gradle vagrantSmokeTestAllDistros`. The first time you run this it will
download the base images and provision the boxes and immediately quit. If you
run this again it'll skip the download step.
--------------------------------------------------------
vagrant up --provision trusty --provider virtualbox && vagrant halt trusty
vagrant up --provision centos-7 --provider virtualbox && vagrant halt centos-7
--------------------------------------------------------
. Run the tests with `gradle checkPackages`. This will cause gradle to build
the tar, zip, and deb packages and all the plugins. It will then run the tests
on ubuntu-1404 and centos-7. We chose those two distributions as the default
because they cover deb and rpm packaging and SysVinit and systemd.
or run this to download and setup all the VMs:
You can run on all the VMs by running `gradle checkPackagesAllDistros`. You can
run a particular VM with a command like `gradle checkOel7`. See `gradle tasks`
for a list. It's important to know that if you ctrl-c any of these `gradle`
commands then the boxes will remain running and you'll have to terminate them
with `vagrant halt`.
-------------------------------------------------------------------------------
vagrant halt
for box in $(vagrant status | grep 'poweroff\|not created' | cut -f1 -d' '); do
vagrant up --provision $box --provider virtualbox
vagrant halt $box
done
-------------------------------------------------------------------------------
. Smoke test the maven/ant dance that we use to get vagrant involved in
integration testing is working:
---------------------------------------------
mvn -Dtests.vagrant -Psmoke-vms -pl qa/vagrant verify
---------------------------------------------
or this to validate all the VMs:
-------------------------------------------------
mvn -Dtests.vagrant=all -Psmoke-vms -pl qa/vagrant verify
-------------------------------------------------
That will start up the VMs and then immediate quit.
. Finally run the tests. The fastest way to get this started is to run:
-----------------------------------
mvn clean install -DskipTests
mvn -Dtests.vagrant -pl qa/vagrant verify
-----------------------------------
You could just run:
--------------------
mvn -Dtests.vagrant verify
--------------------
but that will run all the tests. Which is probably a good thing, but not always
what you want.
Whichever snippet you run mvn will build the tar, zip and deb packages. If you
have rpmbuild installed it'll build the rpm package as well. Then mvn will
spin up trusty and verify the tar, zip, and deb package. If you have rpmbuild
installed it'll spin up centos-7 and verify the tar, zip and rpm packages. We
chose those two distributions as the default because they cover deb and rpm
packaging and SyvVinit and systemd.
You can control the boxes that are used for testing like so. Run just
fedora-22 with:
--------------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant verify -DboxesToTest=fedora-22
--------------------------------------------
or run jessie and trusty:
------------------------------------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant verify -DboxesToTest='jessie, trusty'
------------------------------------------------------------------
or run all the boxes:
---------------------------------------
mvn -Dtests.vagrant=all -pl qa/vagrant verify
---------------------------------------
If you want to run a specific test on several boxes you can do:
---------------------------------------
mvn -Dtests.vagrant=all -pl qa/vagrant verify -DtestScripts=*tar*.bats
---------------------------------------
Its important to know that if you ctrl-c any of these `mvn` runs that you'll
probably leave a VM up. You can terminate it by running:
------------
vagrant halt
------------
This is just regular vagrant so you can run normal multi box vagrant commands
to test things manually. Just run:
---------------------------------------
vagrant up trusty --provider virtualbox && vagrant ssh trusty
---------------------------------------
to get an Ubuntu or
-------------------------------------------
vagrant up centos-7 --provider virtualbox && vagrant ssh centos-7
-------------------------------------------
to get a CentOS. Once you are done with them you should halt them:
-------------------
vagrant halt trusty
-------------------
All the regular vagrant commands should just work so you can get a shell in a
VM running trusty by running
`vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404`.
These are the linux flavors the Vagrantfile currently supports:
* precise aka Ubuntu 12.04
* trusty aka Ubuntu 14.04
* vivid aka Ubuntun 15.04
* jessie aka Debian 8, the current debina stable distribution
* ubuntu-1204 aka precise
* ubuntu-1404 aka trusty
* ubuntu-1504 aka vivid
* debian-8 aka jessie, the current debian stable distribution
* centos-6
* centos-7
* fedora-22
* oel-7 aka Oracle Enterprise Linux 7
* sles-12
* opensuse-13
We're missing the following from the support matrix because there aren't high
quality boxes available in vagrant atlas:
* sles-11
* opensuse-13
* oel-6
We're missing the following because our tests are very linux/bash centric:
* Windows Server 2012
Its important to think of VMs like cattle: if they become lame you just shoot
It's important to think of VMs like cattle. If they become lame you just shoot
them and let vagrant reprovision them. Say you've hosed your precise VM:
----------------------------------------------------
vagrant ssh precise -c 'sudo rm -rf /bin'; echo oops
vagrant ssh ubuntu-1404 -c 'sudo rm -rf /bin'; echo oops
----------------------------------------------------
All you've got to do to get another one is
----------------------------------------------
vagrant destroy -f trusty && vagrant up trusty --provider virtualbox
vagrant destroy -f ubuntu-1404 && vagrant up ubuntu-1404 --provider virtualbox
----------------------------------------------
The whole process takes a minute and a half on a modern laptop, two and a half
@ -492,13 +415,8 @@ vagrant halt
vagrant destroy -f
------------------
----------
vagrant up
----------
would normally start all the VMs but we've prevented that because that'd
consume a ton of ram.
`vagrant up` would normally start all the VMs but we've prevented that because
that'd consume a ton of ram.
== Testing scripts more directly
@ -507,7 +425,7 @@ destructive. When working with a single package its generally faster to run its
tests in a tighter loop than maven provides. In one window:
--------------------------------
mvn -pl distribution/rpm package
gradle :distribution:rpm:assemble
--------------------------------
and in another window:
@ -521,10 +439,7 @@ sudo bats $BATS/*rpm*.bats
If you wanted to retest all the release artifacts on a single VM you could:
-------------------------------------------------
# Build all the distributions fresh but skip recompiling elasticsearch:
mvn -amd -pl distribution install -DskipTests
# Copy them all the testroot
mvn -Dtests.vagrant -pl qa/vagrant pre-integration-test
gradle prepareTestRoot
vagrant up trusty --provider virtualbox && vagrant ssh trusty
cd $TESTROOT
sudo bats $BATS/*.bats
@ -555,5 +470,22 @@ mvn -Dtests.coverage verify jacoco:report
== Debugging from an IDE
If you want to run elasticsearch from your IDE, you should execute ./run.sh
It opens a remote debugging port that you can connect with your IDE.
If you want to run elasticsearch from your IDE, the `gradle run` task
supports a remote debugging option:
---------------------------------------------------------------------------
gradle run --debug-jvm
---------------------------------------------------------------------------
== Building with extra plugins
Additional plugins may be built alongside elasticsearch, where their
dependency on elasticsearch will be substituted with the local elasticsearch
build. To add your plugin, create a directory called x-plugins as a sibling
of elasticsearch. Check out your plugin underneath x-plugins and the build
will automatically pick it up. You can verify the plugin is included as part
of the build by checking the projects of the build.
---------------------------------------------------------------------------
gradle projects
---------------------------------------------------------------------------
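For illustration, a hypothetical plugin checkout named `my-plugin` (the name
is only an example) would give this layout:

---------------------------------------------------------------------------
elasticsearch/   # this checkout
x-plugins/
  my-plugin/     # picked up automatically by the build
---------------------------------------------------------------------------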

Vagrantfile vendored

@ -22,15 +22,15 @@
# under the License.
Vagrant.configure(2) do |config|
config.vm.define "precise" do |config|
config.vm.define "ubuntu-1204" do |config|
config.vm.box = "ubuntu/precise64"
ubuntu_common config
end
config.vm.define "trusty" do |config|
config.vm.define "ubuntu-1404" do |config|
config.vm.box = "ubuntu/trusty64"
ubuntu_common config
end
config.vm.define "vivid" do |config|
config.vm.define "ubuntu-1504" do |config|
config.vm.box = "ubuntu/vivid64"
ubuntu_common config, extra: <<-SHELL
# Install Jayatana so we can work around it being present.
@ -40,7 +40,7 @@ Vagrant.configure(2) do |config|
# Wheezy's backports don't contain Openjdk 8 and the backflips required to
# get the sun jdk on there just aren't worth it. We have jessie for testing
# debian and it works fine.
config.vm.define "jessie" do |config|
config.vm.define "debian-8" do |config|
config.vm.box = "debian/jessie64"
deb_common config,
'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
@ -137,7 +137,7 @@ def deb_common(config, add_openjdk_repository_command, openjdk_list, extra: '')
extra: <<-SHELL
export DEBIAN_FRONTEND=noninteractive
ls /etc/apt/sources.list.d/#{openjdk_list}.list > /dev/null 2>&1 ||
(echo "Importing java-8 ppa" &&
(echo "==> Importing java-8 ppa" &&
#{add_openjdk_repository_command} &&
apt-get update)
#{extra}
@ -223,9 +223,11 @@ def provision(config,
install() {
# Only apt-get update if we haven't in the last day
if [ ! -f #{update_tracking_file} ] || [ "x$(find #{update_tracking_file} -mtime +0)" == "x#{update_tracking_file}" ]; then
#{update_command} || true
touch #{update_tracking_file}
echo "==> Updating repository"
#{update_command} || true
touch #{update_tracking_file}
fi
echo "==> Installing $1"
#{install_command} $1
}
ensure() {
@ -242,17 +244,18 @@ def provision(config,
installed bats || {
# Bats lives in a git repository....
ensure git
echo "==> Installing bats"
git clone https://github.com/sstephenson/bats /tmp/bats
# Centos doesn't add /usr/local/bin to the path....
/tmp/bats/install.sh /usr
rm -rf /tmp/bats
}
cat \<\<VARS > /etc/profile.d/elasticsearch_vars.sh
export ZIP=/elasticsearch/distribution/zip/target/releases
export TAR=/elasticsearch/distribution/tar/target/releases
export RPM=/elasticsearch/distribution/rpm/target/releases
export DEB=/elasticsearch/distribution/deb/target/releases
export TESTROOT=/elasticsearch/qa/vagrant/target/testroot
export ZIP=/elasticsearch/distribution/zip/build/distributions
export TAR=/elasticsearch/distribution/tar/build/distributions
export RPM=/elasticsearch/distribution/rpm/build/distributions
export DEB=/elasticsearch/distribution/deb/build/distributions
export TESTROOT=/elasticsearch/qa/vagrant/build/testroot
export BATS=/elasticsearch/qa/vagrant/src/test/resources/packaging/scripts
VARS
SHELL

build.gradle Normal file

@ -0,0 +1,250 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.bmuschko.gradle.nexus.NexusPlugin
import org.gradle.plugins.ide.eclipse.model.SourceFolder
// common maven publishing configuration
subprojects {
group = 'org.elasticsearch'
version = org.elasticsearch.gradle.VersionProperties.elasticsearch
plugins.withType(NexusPlugin).whenPluginAdded {
modifyPom {
project {
url 'https://github.com/elastic/elasticsearch'
inceptionYear '2009'
scm {
url 'https://github.com/elastic/elasticsearch'
connection 'scm:https://elastic@github.com/elastic/elasticsearch'
developerConnection 'scm:git://github.com/elastic/elasticsearch.git'
}
licenses {
license {
name 'The Apache Software License, Version 2.0'
url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
distribution 'repo'
}
}
}
}
extraArchive {
javadoc = true
tests = false
}
// we have our own username/password prompts so that they only happen once
// TODO: add gpg signing prompts
project.gradle.taskGraph.whenReady { taskGraph ->
if (taskGraph.allTasks.any { it.name == 'uploadArchives' }) {
Console console = System.console()
if (project.hasProperty('nexusUsername') == false) {
String nexusUsername = console.readLine('\nNexus username: ')
project.rootProject.allprojects.each {
it.ext.nexusUsername = nexusUsername
}
}
if (project.hasProperty('nexusPassword') == false) {
String nexusPassword = new String(console.readPassword('\nNexus password: '))
project.rootProject.allprojects.each {
it.ext.nexusPassword = nexusPassword
}
}
}
}
}
}
allprojects {
// injecting groovy property variables into all projects
project.ext {
// for eclipse hacks...
isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
}
}
subprojects {
project.afterEvaluate {
// include license and notice in jars
tasks.withType(Jar) {
into('META-INF') {
from project.rootProject.rootDir
include 'LICENSE.txt'
include 'NOTICE.txt'
}
}
// ignore missing javadocs
tasks.withType(Javadoc) { Javadoc javadoc ->
// the -quiet here is because of a bug in gradle, in that adding a string option
// by itself is not added to the options. By adding quiet, both this option and
// the "value" -quiet is added, separated by a space. This is ok since the javadoc
// command already adds -quiet, so we are just duplicating it
// see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
javadoc.options.encoding='UTF8'
javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
}
}
/* Sets up the dependencies that we build as part of this project but
register as though they were external to resolve internally. We register
them as external dependencies so the build plugin that we use can be used
to build elasticsearch plugins outside of the elasticsearch source tree. */
ext.projectSubstitutions = [
"org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
"org.elasticsearch:elasticsearch:${version}": ':core',
"org.elasticsearch:test-framework:${version}": ':test-framework',
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',
"org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:deb',
]
configurations.all {
resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
projectSubstitutions.each { k,v ->
subs.substitute(subs.module(k)).with(subs.project(v))
}
}
}
}
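// As an illustrative sketch (not part of the build): with the substitutions
// above, a dependency declared against maven coordinates resolves to the
// local project instead of a published artifact, e.g. in any subproject:
//
//   dependencies {
//     testCompile "org.elasticsearch:test-framework:${version}" // -> project(':test-framework')
//   }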
// Ensure similar tasks in dependent projects run first. The projectsEvaluated here is
// important because, while dependencies.all will pick up future dependencies,
// it is not necessarily true that the task exists in both projects at the time
// the dependency is added.
gradle.projectsEvaluated {
allprojects {
if (project.path == ':test-framework') {
// :test-framework:test cannot run before and after :core:test
return
}
configurations.all {
dependencies.all { Dependency dep ->
Project upstreamProject = null
if (dep instanceof ProjectDependency) {
upstreamProject = dep.dependencyProject
} else {
// gradle doesn't apply substitutions until resolve time, so they won't
// show up as a ProjectDependency above
String substitution = projectSubstitutions.get("${dep.group}:${dep.name}:${dep.version}")
if (substitution != null) {
upstreamProject = findProject(substitution)
}
}
if (upstreamProject != null) {
if (project.path == upstreamProject.path) {
// TODO: distribution integ tests depend on themselves (!), fix that
return
}
for (String taskName : ['test', 'integTest']) {
Task task = project.tasks.findByName(taskName)
Task upstreamTask = upstreamProject.tasks.findByName(taskName)
if (task != null && upstreamTask != null) {
task.mustRunAfter(upstreamTask)
}
}
}
}
}
}
}
// intellij configuration
allprojects {
apply plugin: 'idea'
}
idea {
project {
languageLevel = org.elasticsearch.gradle.BuildPlugin.minimumJava.toString()
vcs = 'Git'
}
}
// Make sure gradle idea was run before running anything in intellij (including import).
File ideaMarker = new File(projectDir, '.local-idea-is-configured')
tasks.idea.doLast {
ideaMarker.setText('', 'UTF-8')
}
if (System.getProperty('idea.active') != null && ideaMarker.exists() == false) {
throw new GradleException('You must run gradle idea from the root of elasticsearch before importing into IntelliJ')
}
// add buildSrc itself as a groovy project
task buildSrcIdea(type: GradleBuild) {
buildFile = 'buildSrc/build.gradle'
tasks = ['cleanIdea', 'ideaModule']
}
tasks.idea.dependsOn(buildSrcIdea)
// eclipse configuration
allprojects {
apply plugin: 'eclipse'
plugins.withType(JavaBasePlugin) {
eclipse.classpath.defaultOutputDir = new File(project.buildDir, 'eclipse')
eclipse.classpath.file.whenMerged { classpath ->
// give each source folder a unique corresponding output folder
int i = 0;
classpath.entries.findAll { it instanceof SourceFolder }.each { folder ->
i++;
// this is *NOT* a path or a file.
folder.output = "build/eclipse/" + i
}
}
}
task copyEclipseSettings(type: Copy) {
// TODO: "package this up" for external builds
from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings')
into '.settings'
}
// otherwise .settings is not nuked entirely
tasks.cleanEclipse {
delete '.settings'
}
// otherwise the eclipse merging is *super confusing*
tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings)
}
// add buildSrc itself as a groovy project
task buildSrcEclipse(type: GradleBuild) {
buildFile = 'buildSrc/build.gradle'
tasks = ['cleanEclipse', 'eclipse']
}
tasks.eclipse.dependsOn(buildSrcEclipse)
// we need to add the same --debug-jvm option as
// the real RunTask has, so we can pass it through
class Run extends DefaultTask {
boolean debug = false
@org.gradle.api.internal.tasks.options.Option(
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
public void setDebug(boolean enabled) {
project.project(':distribution').run.clusterConfig.debug = enabled
}
}
task run(type: Run) {
dependsOn ':distribution:run'
description = 'Runs elasticsearch in the foreground'
group = 'Verification'
impliesSubProjects = true
}

buildSrc/build.gradle Normal file

@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// we must use buildscript + apply so that an external plugin
// can apply this file, since the plugins directive is not
// supported through file includes
buildscript {
repositories {
jcenter()
}
dependencies {
classpath 'com.bmuschko:gradle-nexus-plugin:2.3.1'
}
}
apply plugin: 'groovy'
apply plugin: 'com.bmuschko.nexus'
// TODO: move common IDE configuration to a common file to include
apply plugin: 'idea'
apply plugin: 'eclipse'
group = 'org.elasticsearch.gradle'
archivesBaseName = 'build-tools'
Properties props = new Properties()
props.load(project.file('version.properties').newDataInputStream())
version = props.getProperty('elasticsearch')
repositories {
mavenCentral()
maven {
name 'sonatype-snapshots'
url "https://oss.sonatype.org/content/repositories/snapshots/"
}
jcenter()
}
dependencies {
compile gradleApi()
compile localGroovy()
compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}"
compile("junit:junit:${props.getProperty('junit')}") {
transitive = false
}
compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
compile 'de.thetaphi:forbiddenapis:2.0'
compile 'com.bmuschko:gradle-nexus-plugin:2.3.1'
}
processResources {
inputs.file('version.properties')
from 'version.properties'
}
extraArchive {
javadoc = false
tests = false
}
eclipse {
classpath {
defaultOutputDir = new File(file('build'), 'eclipse')
}
}
task copyEclipseSettings(type: Copy) {
from project.file('src/main/resources/eclipse.settings')
into '.settings'
}
// otherwise .settings is not nuked entirely
tasks.cleanEclipse {
delete '.settings'
}
tasks.eclipse.dependsOn(cleanEclipse, copyEclipseSettings)


@ -0,0 +1,53 @@
package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.SuiteBalancer
import com.carrotsearch.ant.tasks.junit4.balancers.ExecutionTimeBalancer
import com.carrotsearch.ant.tasks.junit4.listeners.ExecutionTimesReport
import org.apache.tools.ant.types.FileSet
class BalancersConfiguration {
// parent task, so executionTime can register an additional listener
RandomizedTestingTask task
List<SuiteBalancer> balancers = new ArrayList<>()
void executionTime(Map<String,Object> properties) {
ExecutionTimeBalancer balancer = new ExecutionTimeBalancer()
FileSet fileSet = new FileSet()
Object filename = properties.remove('cacheFilename')
if (filename == null) {
throw new IllegalArgumentException('cacheFilename is required for executionTime balancer')
}
fileSet.setIncludes(filename.toString())
File cacheDir = task.project.projectDir
Object dir = properties.remove('cacheDir')
if (dir != null) {
cacheDir = new File(dir.toString())
}
fileSet.setDir(cacheDir)
balancer.add(fileSet)
int historySize = 10
Object size = properties.remove('historySize')
if (size instanceof Integer) {
historySize = (Integer)size
} else if (size != null) {
throw new IllegalArgumentException('historySize must be an integer')
}
ExecutionTimesReport listener = new ExecutionTimesReport()
listener.setFile(new File(cacheDir, filename.toString()))
listener.setHistoryLength(historySize)
if (properties.isEmpty() == false) {
throw new IllegalArgumentException('Unknown properties for executionTime balancer: ' + properties.keySet())
}
task.listenersConfig.listeners.add(listener)
balancers.add(balancer)
}
void custom(SuiteBalancer balancer) {
balancers.add(balancer)
}
}
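// A hedged usage sketch (file name and history size illustrative): with the
// RandomizedTesting plugin applied, a build script registers this balancer
// through the task's `balancers` closure:
//
//   test {
//     balancers {
//       executionTime cacheFilename: 'test-execution-times.log', historySize: 10
//     }
//   }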


@ -0,0 +1,25 @@
package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
import com.carrotsearch.ant.tasks.junit4.listeners.antxml.AntXmlReport
class ListenersConfiguration {
RandomizedTestingTask task
List<AggregatedEventListener> listeners = new ArrayList<>()
void junitReport(Map<String, Object> props) {
AntXmlReport reportListener = new AntXmlReport()
Object dir = props == null ? null : props.get('dir')
if (dir != null) {
reportListener.setDir(task.project.file(dir))
} else {
reportListener.setDir(new File(task.project.buildDir, 'reports' + File.separator + "${task.name}Junit"))
}
listeners.add(reportListener)
}
void custom(AggregatedEventListener listener) {
listeners.add(listener)
}
}
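// A minimal usage sketch (directory name illustrative): configure the ant-xml
// report through the task's `listeners` closure; omitting `dir` falls back to
// build/reports/<taskName>Junit as coded above:
//
//   test {
//     listeners {
//       junitReport dir: file("$buildDir/reports/junit-xml")
//     }
//   }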


@ -0,0 +1,64 @@
package com.carrotsearch.gradle.junit4
import org.gradle.api.logging.LogLevel
import org.gradle.api.logging.Logger
/**
* Writes data passed to this stream as log messages.
*
* The stream will be flushed whenever a newline is detected.
* Allows setting an optional prefix before each line of output.
*/
public class LoggingOutputStream extends OutputStream {
/** The starting length of the buffer */
static final int DEFAULT_BUFFER_LENGTH = 4096
/** The buffer of bytes sent to the stream */
byte[] buffer = new byte[DEFAULT_BUFFER_LENGTH]
/** Offset of the start of unwritten data in the buffer */
int start = 0
/** Offset of the end (semi-open) of unwritten data in the buffer */
int end = 0
/** Logger to write stream data to */
Logger logger
/** Prefix to add before each line of output */
String prefix = ""
/** Log level to write log messages to */
LogLevel level
void write(final int b) throws IOException {
if (b == 0) return;
if (b == (int)'\n' as char) {
// always flush with newlines instead of adding to the buffer
flush()
return
}
if (end == buffer.length) {
if (start != 0) {
// first try shifting the used buffer back to the beginning to make space
System.arraycopy(buffer, start, buffer, 0, end - start)
end -= start
start = 0
} else {
// need more space, extend the buffer
final int newBufferLength = buffer.length + DEFAULT_BUFFER_LENGTH;
final byte[] newBuffer = new byte[newBufferLength];
System.arraycopy(buffer, 0, newBuffer, 0, buffer.length);
buffer = newBuffer;
}
}
buffer[end++] = (byte) b;
}
void flush() {
if (end == start) return
logger.log(level, prefix + new String(buffer, start, end - start));
start = end
}
}
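// Usage sketch (illustrative, not part of the build): bytes written to the
// stream surface as log messages, one per newline-terminated chunk:
//
//   def out = new LoggingOutputStream(logger: logger, level: LogLevel.INFO, prefix: '[junit4] ')
//   out.write('hello world\n'.getBytes('UTF-8'))   // logged as "[junit4] hello world"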


@ -0,0 +1,47 @@
package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.JUnit4
import org.gradle.api.AntBuilder
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.TaskContainer
import org.gradle.api.tasks.testing.Test
class RandomizedTestingPlugin implements Plugin<Project> {
void apply(Project project) {
replaceTestTask(project.tasks)
configureAnt(project.ant)
}
static void replaceTestTask(TaskContainer tasks) {
Test oldTestTask = tasks.findByPath('test')
if (oldTestTask == null) {
// no test task, ok, user will use testing task on their own
return
}
tasks.remove(oldTestTask)
Map properties = [
name: 'test',
type: RandomizedTestingTask,
dependsOn: oldTestTask.dependsOn,
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Runs unit tests with the randomized testing framework'
]
RandomizedTestingTask newTestTask = tasks.create(properties)
newTestTask.classpath = oldTestTask.classpath
newTestTask.testClassesDir = oldTestTask.testClassesDir
// hack so check task depends on custom test
Task checkTask = tasks.findByPath('check')
checkTask.dependsOn.remove(oldTestTask)
checkTask.dependsOn.add(newTestTask)
}
static void configureAnt(AntBuilder ant) {
ant.project.addTaskDefinition('junit4:junit4', JUnit4.class)
}
}
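// A sketch of activating the plugin (applied by class here, since no plugin id
// is registered in this file): the standard 'test' task is replaced with a
// RandomizedTestingTask and 'check' is rewired to depend on the replacement:
//
//   apply plugin: com.carrotsearch.gradle.junit4.RandomizedTestingPlugin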


@ -0,0 +1,304 @@
package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.ListenersList
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
import com.esotericsoftware.kryo.serializers.FieldSerializer
import groovy.xml.NamespaceBuilder
import groovy.xml.NamespaceBuilderSupport
import org.apache.tools.ant.BuildException
import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.RuntimeConfigurable
import org.apache.tools.ant.UnknownElement
import org.gradle.api.DefaultTask
import org.gradle.api.file.FileCollection
import org.gradle.api.file.FileTreeElement
import org.gradle.api.internal.tasks.options.Option
import org.gradle.api.specs.Spec
import org.gradle.api.tasks.*
import org.gradle.api.tasks.util.PatternFilterable
import org.gradle.api.tasks.util.PatternSet
import org.gradle.logging.ProgressLoggerFactory
import org.gradle.util.ConfigureUtil
import javax.inject.Inject
class RandomizedTestingTask extends DefaultTask {
// TODO: change to "executable" to match gradle test params?
@Optional
@Input
String jvm = 'java'
@Optional
@Input
File workingDir = new File(project.buildDir, 'testrun' + File.separator + name)
@Optional
@Input
FileCollection classpath
@Input
String parallelism = '1'
@InputDirectory
File testClassesDir
@Optional
@Input
boolean haltOnFailure = true
@Optional
@Input
boolean shuffleOnSlave = true
@Optional
@Input
boolean enableAssertions = true
@Optional
@Input
boolean enableSystemAssertions = true
@Optional
@Input
boolean leaveTemporary = false
@Optional
@Input
String ifNoTests = 'ignore'
TestLoggingConfiguration testLoggingConfig = new TestLoggingConfiguration()
BalancersConfiguration balancersConfig = new BalancersConfiguration(task: this)
ListenersConfiguration listenersConfig = new ListenersConfiguration(task: this)
List<String> jvmArgs = new ArrayList<>()
@Optional
@Input
String argLine = null
Map<String, Object> systemProperties = new HashMap<>()
PatternFilterable patternSet = new PatternSet()
RandomizedTestingTask() {
outputs.upToDateWhen {false} // randomized tests are never up to date
listenersConfig.listeners.add(new TestProgressLogger(factory: getProgressLoggerFactory()))
listenersConfig.listeners.add(new TestReportLogger(logger: logger, config: testLoggingConfig))
}
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException();
}
void jvmArgs(Iterable<String> arguments) {
jvmArgs.addAll(arguments)
}
void jvmArg(String argument) {
jvmArgs.add(argument)
}
void systemProperty(String property, Object value) {
systemProperties.put(property, value)
}
void include(String... includes) {
this.patternSet.include(includes);
}
void include(Iterable<String> includes) {
this.patternSet.include(includes);
}
void include(Spec<FileTreeElement> includeSpec) {
this.patternSet.include(includeSpec);
}
void include(Closure includeSpec) {
this.patternSet.include(includeSpec);
}
void exclude(String... excludes) {
this.patternSet.exclude(excludes);
}
void exclude(Iterable<String> excludes) {
this.patternSet.exclude(excludes);
}
void exclude(Spec<FileTreeElement> excludeSpec) {
this.patternSet.exclude(excludeSpec);
}
void exclude(Closure excludeSpec) {
this.patternSet.exclude(excludeSpec);
}
@Input
void testLogging(Closure closure) {
ConfigureUtil.configure(closure, testLoggingConfig)
}
@Input
void balancers(Closure closure) {
ConfigureUtil.configure(closure, balancersConfig)
}
@Input
void listeners(Closure closure) {
ConfigureUtil.configure(closure, listenersConfig)
}
@Option(
option = "tests",
description = "Sets test class or method name to be included. This is for IDEs. Use -Dtests.class and -Dtests.method"
)
void setTestNameIncludePattern(String testNamePattern) {
// This is only implemented to give support for IDEs running tests. There are 3 patterns expected:
// * An exact test class and method
// * An exact test class
// * A package name prefix, ending with .*
// There is no way to distinguish the first two without looking at classes, so we use the rule
// that class names start with an uppercase letter...
// TODO: this doesn't work yet, but not sure why... intellij says it is using --tests, and this works from the command line...
String[] parts = testNamePattern.split('\\.')
String lastPart = parts[parts.length - 1]
String classname
String methodname = null
if (lastPart.equals('*') || lastPart.charAt(0).isUpperCase()) {
// package name or class name, just pass through
classname = testNamePattern
} else {
// method name, need to separate
methodname = lastPart
classname = testNamePattern.substring(0, testNamePattern.length() - lastPart.length() - 1)
}
ant.setProperty('tests.class', classname)
if (methodname != null) {
ant.setProperty('tests.method', methodname)
}
}
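// Worked examples of the three patterns above (names illustrative):
//   --tests org.elasticsearch.pkg.MyTests.testFoo  -> tests.class=org.elasticsearch.pkg.MyTests, tests.method=testFoo
//   --tests org.elasticsearch.pkg.MyTests          -> tests.class=org.elasticsearch.pkg.MyTests
//   --tests org.elasticsearch.pkg.*                -> tests.class=org.elasticsearch.pkg.*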
@TaskAction
void executeTests() {
Map attributes = [
jvm: jvm,
parallelism: parallelism,
heartbeat: testLoggingConfig.slowTests.heartbeat,
dir: workingDir,
tempdir: new File(workingDir, 'temp'),
haltOnFailure: true, // we want to capture when a build failed, but will decide whether to rethrow later
shuffleOnSlave: shuffleOnSlave,
leaveTemporary: leaveTemporary,
ifNoTests: ifNoTests
]
DefaultLogger listener = null
ByteArrayOutputStream antLoggingBuffer = null
if (logger.isInfoEnabled() == false) {
// in info logging, ant already outputs info level, so we see everything
// but on errors or when debugging, we want to see info level messages
// because junit4 emits jvm output with ant logging
if (testLoggingConfig.outputMode == TestLoggingConfiguration.OutputMode.ALWAYS) {
// we want all output, so just stream directly
listener = new DefaultLogger(
errorPrintStream: System.err,
outputPrintStream: System.out,
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
} else {
// we want to buffer the info, and emit it if the test fails
antLoggingBuffer = new ByteArrayOutputStream()
PrintStream stream = new PrintStream(antLoggingBuffer, true, "UTF-8")
listener = new DefaultLogger(
errorPrintStream: stream,
outputPrintStream: stream,
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
}
project.ant.project.addBuildListener(listener)
}
NamespaceBuilderSupport junit4 = NamespaceBuilder.newInstance(ant, 'junit4')
try {
junit4.junit4(attributes) {
classpath {
pathElement(path: classpath.asPath)
}
if (enableAssertions) {
jvmarg(value: '-ea')
}
if (enableSystemAssertions) {
jvmarg(value: '-esa')
}
for (String arg : jvmArgs) {
jvmarg(value: arg)
}
if (argLine != null) {
jvmarg(line: argLine)
}
fileset(dir: testClassesDir) {
for (String includePattern : patternSet.getIncludes()) {
include(name: includePattern)
}
for (String excludePattern : patternSet.getExcludes()) {
exclude(name: excludePattern)
}
}
for (Map.Entry<String, Object> prop : systemProperties) {
sysproperty key: prop.getKey(), value: prop.getValue().toString()
}
makeListeners()
}
} catch (BuildException e) {
if (antLoggingBuffer != null) {
logger.error('JUnit4 test failed, ant output was:')
logger.error(antLoggingBuffer.toString('UTF-8'))
}
if (haltOnFailure) {
throw e;
}
}
if (listener != null) {
            // remove the listener we added so other ant tasks don't have verbose logging!
project.ant.project.removeBuildListener(listener)
}
}
static class ListenersElement extends UnknownElement {
AggregatedEventListener[] listeners
ListenersElement() {
super('listeners')
setNamespace('junit4')
setQName('listeners')
}
public void handleChildren(Object realThing, RuntimeConfigurable wrapper) {
assert realThing instanceof ListenersList
ListenersList list = (ListenersList)realThing
for (AggregatedEventListener listener : listeners) {
list.addConfigured(listener)
}
}
}
/**
* Makes an ant xml element for 'listeners' just as AntBuilder would, except configuring
* the element adds the already created children.
*/
def makeListeners() {
def context = ant.getAntXmlContext()
def parentWrapper = context.currentWrapper()
def parent = parentWrapper.getProxy()
UnknownElement element = new ListenersElement(listeners: listenersConfig.listeners)
element.setProject(context.getProject())
element.setRealThing(logger)
((UnknownElement)parent).addChild(element)
RuntimeConfigurable wrapper = new RuntimeConfigurable(element, element.getQName())
parentWrapper.addChild(wrapper)
return wrapper.getProxy()
}
}

View File

@ -0,0 +1,14 @@
package com.carrotsearch.gradle.junit4
class SlowTestsConfiguration {
int heartbeat = 0
int summarySize = 0
void heartbeat(int heartbeat) {
this.heartbeat = heartbeat
}
void summarySize(int summarySize) {
this.summarySize = summarySize
}
}

View File

@ -0,0 +1,14 @@
package com.carrotsearch.gradle.junit4
class StackTraceFiltersConfiguration {
List<String> patterns = new ArrayList<>()
List<String> contains = new ArrayList<>()
void regex(String pattern) {
patterns.add(pattern)
}
void contains(String contain) {
contains.add(contain)
}
}

View File

@ -0,0 +1,43 @@
package com.carrotsearch.gradle.junit4
import org.gradle.api.tasks.Input
import org.gradle.util.ConfigureUtil
class TestLoggingConfiguration {
/** Display mode for output streams. */
static enum OutputMode {
/** Always display the output emitted from tests. */
ALWAYS,
/**
     * Display the output only if a test/suite failed. This requires internal buffering
* so the output will be shown only after a test completes.
*/
ONERROR,
/** Don't display the output, even on test failures. */
NEVER
}
OutputMode outputMode = OutputMode.ONERROR
SlowTestsConfiguration slowTests = new SlowTestsConfiguration()
StackTraceFiltersConfiguration stackTraceFilters = new StackTraceFiltersConfiguration()
/** Summarize the first N failures at the end of the test. */
@Input
int showNumFailuresAtEnd = 3 // match TextReport default
void slowTests(Closure closure) {
ConfigureUtil.configure(closure, slowTests)
}
void stackTraceFilters(Closure closure) {
ConfigureUtil.configure(closure, stackTraceFilters)
}
void outputMode(String mode) {
outputMode = mode.toUpperCase() as OutputMode
}
void showNumFailuresAtEnd(int n) {
showNumFailuresAtEnd = n
}
}
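As a usage sketch (values illustrative), this DSL is typically configured from a
test task; the method names match those defined above:

    testLogging {
        outputMode 'onerror'
        showNumFailuresAtEnd 10
        slowTests {
            heartbeat 10
            summarySize 5
        }
        stackTraceFilters {
            contains '.SlaveMain.'
            regex(/^(\s+at )(org\.junit\.)/)
        }
    }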

View File

@ -0,0 +1,187 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.JUnit4
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.eventbus.Subscribe
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import org.junit.runner.Description
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.*
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds
import static java.lang.Math.max
/**
* Adapts junit4's event listeners into gradle's ProgressLogger. Note that
 * junit4 guarantees (via guava) that methods on this class won't be called by
 * multiple threads simultaneously, which keeps the implementation simple.
*
* Every time a test finishes this class will update the logger. It will log
* the last finished test method on the logger line until the first suite
* finishes. Once the first suite finishes it always logs the last finished
* suite. This means that in test runs with a single suite the logger will be
* updated with the test name the whole time which is useful because these runs
* usually have longer individual tests. For test runs with lots of suites the
* majority of the time is spent showing the last suite that finished which is
* more useful for those test runs because test methods there tend to be very
* quick.
*/
class TestProgressLogger implements AggregatedEventListener {
/** Factory to build a progress logger when testing starts */
ProgressLoggerFactory factory
ProgressLogger progressLogger
int totalSuites
int totalSlaves
// sprintf formats used to align the integers we print
String suitesFormat
String slavesFormat
String testsFormat
    // Counters incremented on test completion.
volatile int suitesCompleted = 0
volatile int testsCompleted = 0
volatile int testsFailed = 0
volatile int testsIgnored = 0
// Information about the last, most interesting event.
volatile String eventDescription
volatile int eventSlave
volatile long eventExecutionTime
/** Have we finished a whole suite yet? */
volatile boolean suiteFinished = false
    /* Note that we probably overuse volatile here but it isn't hurting us and
       lets us move things around without worrying about breaking things. */
@Subscribe
void onStart(AggregatedStartEvent e) throws IOException {
totalSuites = e.suiteCount
totalSlaves = e.slaveCount
progressLogger = factory.newOperation(TestProgressLogger)
progressLogger.setDescription('Randomized test runner')
progressLogger.started()
progressLogger.progress(
"Starting JUnit4 for ${totalSuites} suites on ${totalSlaves} jvms")
suitesFormat = "%0${widthForTotal(totalSuites)}d"
slavesFormat = "%-${widthForTotal(totalSlaves)}s"
/* Just guess the number of tests because we can't figure it out from
here and it isn't worth doing anything fancy to prevent the console
from jumping around a little. 200 is a pretty wild guess for the
minimum but it makes REST tests output sanely. */
int totalNumberOfTestsGuess = max(200, totalSuites * 10)
testsFormat = "%0${widthForTotal(totalNumberOfTestsGuess)}d"
}
@Subscribe
void onTestResult(AggregatedTestResultEvent e) throws IOException {
testsCompleted++
switch (e.status) {
case ERROR:
case FAILURE:
testsFailed++
break
case IGNORED:
case IGNORED_ASSUMPTION:
testsIgnored++
break
case OK:
break
default:
throw new IllegalArgumentException(
"Unknown test status: [${e.status}]")
}
if (!suiteFinished) {
updateEventInfo(e)
}
log()
}
@Subscribe
void onSuiteResult(AggregatedSuiteResultEvent e) throws IOException {
suitesCompleted++
suiteFinished = true
updateEventInfo(e)
log()
}
/**
* Update the suite information with a junit4 event.
*/
private void updateEventInfo(Object e) {
eventDescription = simpleName(e.description.className)
if (e.description.methodName != null) {
eventDescription += "#${e.description.methodName}"
}
eventSlave = e.slave.id
eventExecutionTime = e.executionTime
}
/**
* Extract a Class#getSimpleName style name from Class#getName style
* string. We can't just use Class#getSimpleName because junit descriptions
 * don't always set the class field but they always set the className
* field.
*/
private static String simpleName(String className) {
return className.substring(className.lastIndexOf('.') + 1)
}
private void log() {
/* Remember that instances of this class are only ever active on one
thread at a time so there really aren't race conditions here. It'd be
OK if there were because they'd only display an overcount
temporarily. */
String log = ''
if (totalSuites > 1) {
/* Skip printing the suites to save space when there is only a
single suite. This is nice because when there is only a single
suite we log the method name and those can be long. */
log += sprintf("Suites [${suitesFormat}/${suitesFormat}], ",
[suitesCompleted, totalSuites])
}
log += sprintf("Tests [${testsFormat}|%d|%d], ",
[testsCompleted, testsFailed, testsIgnored])
log += "in ${formatDurationInSeconds(eventExecutionTime)} "
if (totalSlaves > 1) {
/* Skip printing the slaves if there is only one of them. This is
nice because when there is only a single slave there is often
only a single suite and we could use the extra space to log the
test method names. */
log += "J${sprintf(slavesFormat, eventSlave)} "
}
log += "completed ${eventDescription}"
progressLogger.progress(log)
}
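    /* For illustration only (all values hypothetical): with 87 total suites on 4
       jvms, a progress line built above might render roughly as
       "Suites [03/87], Tests [042|1|2], in 1.32s J2 completed FooTests#testBar" */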
private static int widthForTotal(int total) {
return ((total - 1) as String).length()
}
@Override
void setOuter(JUnit4 junit) {}
}

View File

@ -0,0 +1,388 @@
package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.JUnit4
import com.carrotsearch.ant.tasks.junit4.Pluralize
import com.carrotsearch.ant.tasks.junit4.TestsSummaryEventListener
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.base.Strings
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.eventbus.Subscribe
import com.carrotsearch.ant.tasks.junit4.events.*
import com.carrotsearch.ant.tasks.junit4.events.aggregated.*
import com.carrotsearch.ant.tasks.junit4.events.mirrors.FailureMirror
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
import com.carrotsearch.ant.tasks.junit4.listeners.StackTraceFilter
import org.apache.tools.ant.filters.TokenFilter
import org.gradle.api.logging.LogLevel
import org.gradle.api.logging.Logger
import org.junit.runner.Description
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.atomic.AtomicInteger
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.Line;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.*
import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode
class TestReportLogger extends TestsSummaryEventListener implements AggregatedEventListener {
static final String FAILURE_MARKER = " <<< FAILURES!"
/** Status names column. */
static EnumMap<? extends TestStatus, String> statusNames;
static {
statusNames = new EnumMap<>(TestStatus.class);
for (TestStatus s : TestStatus.values()) {
statusNames.put(s,
s == TestStatus.IGNORED_ASSUMPTION
? "IGNOR/A" : s.toString());
}
}
JUnit4 owner
/** Logger to write the report to */
Logger logger
TestLoggingConfiguration config
/** Forked concurrent JVM count. */
int forkedJvmCount
/** Format line for JVM ID string. */
String jvmIdFormat
/** Output stream that logs messages to the given logger */
LoggingOutputStream outStream
LoggingOutputStream errStream
    /** A list of failed tests, displayed at the end of the run if configured. */
List<Description> failedTests = new ArrayList<>()
/** Stack trace filters. */
StackTraceFilter stackFilter = new StackTraceFilter()
Map<String, Long> suiteTimes = new HashMap<>()
boolean slowTestsFound = false
int totalSuites
AtomicInteger suitesCompleted = new AtomicInteger()
@Subscribe
void onStart(AggregatedStartEvent e) throws IOException {
this.totalSuites = e.getSuiteCount();
StringBuilder info = new StringBuilder('==> Test Info: ')
info.append('seed=' + owner.getSeed() + '; ')
info.append(Pluralize.pluralize(e.getSlaveCount(), 'jvm') + '=' + e.getSlaveCount() + '; ')
info.append(Pluralize.pluralize(e.getSuiteCount(), 'suite') + '=' + e.getSuiteCount())
logger.lifecycle(info.toString())
forkedJvmCount = e.getSlaveCount();
jvmIdFormat = " J%-" + (1 + (int) Math.floor(Math.log10(forkedJvmCount))) + "d";
outStream = new LoggingOutputStream(logger: logger, level: LogLevel.LIFECYCLE, prefix: " 1> ")
errStream = new LoggingOutputStream(logger: logger, level: LogLevel.ERROR, prefix: " 2> ")
for (String contains : config.stackTraceFilters.contains) {
TokenFilter.ContainsString containsFilter = new TokenFilter.ContainsString()
containsFilter.setContains(contains)
stackFilter.addContainsString(containsFilter)
}
for (String pattern : config.stackTraceFilters.patterns) {
TokenFilter.ContainsRegex regexFilter = new TokenFilter.ContainsRegex()
regexFilter.setPattern(pattern)
stackFilter.addContainsRegex(regexFilter)
}
}
@Subscribe
void onChildBootstrap(ChildBootstrap e) throws IOException {
logger.info("Started J" + e.getSlave().id + " PID(" + e.getSlave().getPidString() + ").");
}
@Subscribe
void onHeartbeat(HeartBeatEvent e) throws IOException {
logger.warn("HEARTBEAT J" + e.getSlave().id + " PID(" + e.getSlave().getPidString() + "): " +
formatTime(e.getCurrentTime()) + ", stalled for " +
formatDurationInSeconds(e.getNoEventDuration()) + " at: " +
(e.getDescription() == null ? "<unknown>" : formatDescription(e.getDescription())))
try {
playBeat();
} catch (Exception nosound) { /* handling exceptions with style */ }
slowTestsFound = true
}
void playBeat() throws Exception {
Clip clip = (Clip)AudioSystem.getLine(new Line.Info(Clip.class));
final AtomicBoolean stop = new AtomicBoolean();
clip.addLineListener(new LineListener() {
@Override
public void update(LineEvent event) {
if (event.getType() == LineEvent.Type.STOP) {
stop.set(true);
}
}
});
InputStream stream = getClass().getResourceAsStream("/beat.wav");
try {
clip.open(AudioSystem.getAudioInputStream(stream));
clip.start();
while (!stop.get()) {
Thread.sleep(20);
}
clip.close();
} finally {
stream.close();
}
}
@Subscribe
void onQuit(AggregatedQuitEvent e) throws IOException {
if (config.showNumFailuresAtEnd > 0 && !failedTests.isEmpty()) {
List<Description> sublist = this.failedTests
StringBuilder b = new StringBuilder()
b.append('Tests with failures')
if (sublist.size() > config.showNumFailuresAtEnd) {
sublist = sublist.subList(0, config.showNumFailuresAtEnd)
b.append(" (first " + config.showNumFailuresAtEnd + " out of " + failedTests.size() + ")")
}
b.append(':\n')
for (Description description : sublist) {
b.append(" - ").append(formatDescription(description, true)).append('\n')
}
logger.warn(b.toString())
}
if (config.slowTests.summarySize > 0) {
List<Map.Entry<String, Long>> sortedSuiteTimes = new ArrayList<>(suiteTimes.entrySet())
Collections.sort(sortedSuiteTimes, new Comparator<Map.Entry<String, Long>>() {
@Override
int compare(Map.Entry<String, Long> o1, Map.Entry<String, Long> o2) {
                    return o2.value <=> o1.value // sort descending; spaceship avoids long->int truncation
}
})
LogLevel level = slowTestsFound ? LogLevel.WARN : LogLevel.INFO
int numToLog = Math.min(config.slowTests.summarySize, sortedSuiteTimes.size())
logger.log(level, 'Slow Tests Summary:')
for (int i = 0; i < numToLog; ++i) {
logger.log(level, String.format(Locale.ENGLISH, '%6.2fs | %s',
sortedSuiteTimes.get(i).value / 1000.0,
sortedSuiteTimes.get(i).key));
}
logger.log(level, '') // extra vertical separation
}
if (failedTests.isEmpty()) {
// summary is already printed for failures
logger.lifecycle('==> Test Summary: ' + getResult().toString())
}
}
@Subscribe
void onSuiteStart(AggregatedSuiteStartedEvent e) throws IOException {
if (isPassthrough()) {
SuiteStartedEvent evt = e.getSuiteStartedEvent();
emitSuiteStart(LogLevel.LIFECYCLE, evt.getDescription());
}
}
@Subscribe
void onOutput(PartialOutputEvent e) throws IOException {
if (isPassthrough()) {
// We only allow passthrough output if there is one JVM.
switch (e.getEvent().getType()) {
case EventType.APPEND_STDERR:
((IStreamEvent) e.getEvent()).copyTo(errStream);
break;
case EventType.APPEND_STDOUT:
((IStreamEvent) e.getEvent()).copyTo(outStream);
break;
default:
break;
}
}
}
@Subscribe
void onTestResult(AggregatedTestResultEvent e) throws IOException {
if (isPassthrough() && e.getStatus() != TestStatus.OK) {
flushOutput();
emitStatusLine(LogLevel.ERROR, e, e.getStatus(), e.getExecutionTime());
}
if (!e.isSuccessful()) {
failedTests.add(e.getDescription());
}
}
@Subscribe
void onSuiteResult(AggregatedSuiteResultEvent e) throws IOException {
final int completed = suitesCompleted.incrementAndGet();
if (e.isSuccessful() && e.getTests().isEmpty()) {
return;
}
if (config.slowTests.summarySize > 0) {
suiteTimes.put(e.getDescription().getDisplayName(), e.getExecutionTime())
}
LogLevel level = e.isSuccessful() && config.outputMode != OutputMode.ALWAYS ? LogLevel.INFO : LogLevel.LIFECYCLE
// We must emit buffered test and stream events (in case of failures).
if (!isPassthrough()) {
emitSuiteStart(level, e.getDescription())
emitBufferedEvents(level, e)
}
        // Emit a synthetic failure for suite-level errors, if any, and record the suite as failed.
        if (!e.getFailures().isEmpty()) {
            emitStatusLine(level, e, TestStatus.ERROR, 0)
            failedTests.add(e.getDescription())
        }
emitSuiteEnd(level, e, completed)
}
/** Suite prologue. */
void emitSuiteStart(LogLevel level, Description description) throws IOException {
logger.log(level, 'Suite: ' + description.getDisplayName());
}
void emitBufferedEvents(LogLevel level, AggregatedSuiteResultEvent e) throws IOException {
if (config.outputMode == OutputMode.NEVER) {
return
}
final IdentityHashMap<TestFinishedEvent,AggregatedTestResultEvent> eventMap = new IdentityHashMap<>();
for (AggregatedTestResultEvent tre : e.getTests()) {
eventMap.put(tre.getTestFinishedEvent(), tre)
}
final boolean emitOutput = config.outputMode == OutputMode.ALWAYS && isPassthrough() == false ||
config.outputMode == OutputMode.ONERROR && e.isSuccessful() == false
for (IEvent event : e.getEventStream()) {
switch (event.getType()) {
case EventType.APPEND_STDOUT:
if (emitOutput) ((IStreamEvent) event).copyTo(outStream);
break;
case EventType.APPEND_STDERR:
if (emitOutput) ((IStreamEvent) event).copyTo(errStream);
break;
case EventType.TEST_FINISHED:
assert eventMap.containsKey(event)
final AggregatedTestResultEvent aggregated = eventMap.get(event);
if (aggregated.getStatus() != TestStatus.OK) {
flushOutput();
emitStatusLine(level, aggregated, aggregated.getStatus(), aggregated.getExecutionTime());
}
default:
break;
}
}
if (emitOutput) {
flushOutput()
}
}
void emitSuiteEnd(LogLevel level, AggregatedSuiteResultEvent e, int suitesCompleted) throws IOException {
final StringBuilder b = new StringBuilder();
b.append(String.format(Locale.ENGLISH, 'Completed [%d/%d]%s in %.2fs, ',
suitesCompleted,
totalSuites,
e.getSlave().slaves > 1 ? ' on J' + e.getSlave().id : '',
e.getExecutionTime() / 1000.0d));
b.append(e.getTests().size()).append(Pluralize.pluralize(e.getTests().size(), ' test'));
int failures = e.getFailureCount();
if (failures > 0) {
b.append(', ').append(failures).append(Pluralize.pluralize(failures, ' failure'));
}
int errors = e.getErrorCount();
if (errors > 0) {
b.append(', ').append(errors).append(Pluralize.pluralize(errors, ' error'));
}
int ignored = e.getIgnoredCount();
if (ignored > 0) {
b.append(', ').append(ignored).append(' skipped');
}
if (!e.isSuccessful()) {
b.append(' <<< FAILURES!');
}
b.append('\n')
logger.log(level, b.toString());
}
/** Emit status line for an aggregated event. */
void emitStatusLine(LogLevel level, AggregatedResultEvent result, TestStatus status, long timeMillis) throws IOException {
final StringBuilder line = new StringBuilder();
line.append(Strings.padEnd(statusNames.get(status), 8, ' ' as char))
line.append(formatDurationInSeconds(timeMillis))
if (forkedJvmCount > 1) {
line.append(String.format(Locale.ENGLISH, jvmIdFormat, result.getSlave().id))
}
line.append(' | ')
line.append(formatDescription(result.getDescription()))
if (!result.isSuccessful()) {
line.append(FAILURE_MARKER)
}
logger.log(level, line.toString())
PrintWriter writer = new PrintWriter(new LoggingOutputStream(logger: logger, level: level, prefix: ' > '))
if (status == TestStatus.IGNORED && result instanceof AggregatedTestResultEvent) {
writer.write('Cause: ')
writer.write(((AggregatedTestResultEvent) result).getCauseForIgnored())
writer.flush()
}
final List<FailureMirror> failures = result.getFailures();
if (!failures.isEmpty()) {
int count = 0;
for (FailureMirror fm : failures) {
count++;
if (fm.isAssumptionViolation()) {
writer.write(String.format(Locale.ENGLISH,
'Assumption #%d: %s',
count, fm.getMessage() == null ? '(no message)' : fm.getMessage()));
} else {
writer.write(String.format(Locale.ENGLISH,
'Throwable #%d: %s',
count,
stackFilter.apply(fm.getTrace())));
}
}
writer.flush()
}
}
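    /* For illustration only (values and description format hypothetical), a status
       line rendered above with two forked jvms might look roughly like:
       "OK      0.25s J1 | org.example.FooTests.testBar"
       and for a failed test:
       "FAILURE 1.32s J0 | org.example.FooTests.testBaz <<< FAILURES!" */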
void flushOutput() throws IOException {
outStream.flush()
errStream.flush()
}
/** Returns true if output should be logged immediately. */
boolean isPassthrough() {
return forkedJvmCount == 1 && config.outputMode == OutputMode.ALWAYS
}
@Override
void setOuter(JUnit4 task) {
owner = task
}
}

View File

@ -0,0 +1,426 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
import org.gradle.process.ExecResult
import java.time.ZonedDateTime
import java.time.ZoneOffset
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.*
import org.gradle.api.artifacts.*
import org.gradle.api.artifacts.dsl.RepositoryHandler
import org.gradle.api.artifacts.maven.MavenPom
import org.gradle.api.tasks.bundling.Jar
import org.gradle.api.tasks.compile.JavaCompile
import org.gradle.internal.jvm.Jvm
import org.gradle.util.GradleVersion
/**
* Encapsulates build configuration for elasticsearch projects.
*/
class BuildPlugin implements Plugin<Project> {
static final JavaVersion minimumJava = JavaVersion.VERSION_1_8
@Override
void apply(Project project) {
project.pluginManager.apply('java')
project.pluginManager.apply('carrotsearch.randomized-testing')
// these plugins add lots of info to our jars
configureJarManifest(project) // jar config must be added before info broker
project.pluginManager.apply('nebula.info-broker')
project.pluginManager.apply('nebula.info-basic')
project.pluginManager.apply('nebula.info-java')
project.pluginManager.apply('nebula.info-scm')
project.pluginManager.apply('nebula.info-jar')
project.pluginManager.apply('com.bmuschko.nexus')
project.pluginManager.apply(ProvidedBasePlugin)
globalBuildInfo(project)
configureRepositories(project)
configureConfigurations(project)
project.ext.versions = VersionProperties.versions
configureCompile(project)
configureTest(project)
configurePrecommit(project)
}
/** Performs checks on the build environment and prints information about the build environment. */
static void globalBuildInfo(Project project) {
if (project.rootProject.ext.has('buildChecksDone') == false) {
String javaHome = findJavaHome()
File gradleJavaHome = Jvm.current().javaHome
String gradleJavaVersionDetails = "${System.getProperty('java.vendor')} ${System.getProperty('java.version')}" +
" [${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]"
String javaVersionDetails = gradleJavaVersionDetails
String javaVersion = System.getProperty('java.version')
JavaVersion javaVersionEnum = JavaVersion.current()
if (new File(javaHome).canonicalPath != gradleJavaHome.canonicalPath) {
javaVersionDetails = findJavaVersionDetails(project, javaHome)
javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome))
javaVersion = findJavaVersion(project, javaHome)
}
// Build debugging info
println '======================================='
println 'Elasticsearch Build Hamster says Hello!'
println '======================================='
println " Gradle Version : ${project.gradle.gradleVersion}"
println " OS Info : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})"
if (gradleJavaVersionDetails != javaVersionDetails) {
println " JDK Version (gradle) : ${gradleJavaVersionDetails}"
println " JDK Version (compile) : ${javaVersionDetails}"
} else {
println " JDK Version : ${gradleJavaVersionDetails}"
}
// enforce gradle version
GradleVersion minGradle = GradleVersion.version('2.8')
if (GradleVersion.current() < minGradle) {
throw new GradleException("${minGradle} or above is required to build elasticsearch")
}
// enforce Java version
if (javaVersionEnum < minimumJava) {
throw new GradleException("Java ${minimumJava} or above is required to build Elasticsearch")
}
project.rootProject.ext.javaHome = javaHome
project.rootProject.ext.javaVersion = javaVersion
project.rootProject.ext.buildChecksDone = true
}
project.targetCompatibility = minimumJava
project.sourceCompatibility = minimumJava
        // set java home for each project, so they don't have to find it in the root project
project.ext.javaHome = project.rootProject.ext.javaHome
project.ext.javaVersion = project.rootProject.ext.javaVersion
}
    /** Finds the java home to use, enforcing that JAVA_HOME is set */
private static String findJavaHome() {
String javaHome = System.getenv('JAVA_HOME')
if (javaHome == null) {
if (System.getProperty("idea.active") != null) {
// intellij doesn't set JAVA_HOME, so we use the jdk gradle was run with
javaHome = Jvm.current().javaHome
} else {
throw new GradleException('JAVA_HOME must be set to build Elasticsearch')
}
}
return javaHome
}
/** Finds printable java version of the given JAVA_HOME */
private static String findJavaVersionDetails(Project project, String javaHome) {
String versionInfoScript = 'print(' +
'java.lang.System.getProperty("java.vendor") + " " + java.lang.System.getProperty("java.version") + ' +
'" [" + java.lang.System.getProperty("java.vm.name") + " " + java.lang.System.getProperty("java.vm.version") + "]");'
return runJavascript(project, javaHome, versionInfoScript).trim()
}
/** Finds the parsable java specification version */
private static String findJavaSpecificationVersion(Project project, String javaHome) {
String versionScript = 'print(java.lang.System.getProperty("java.specification.version"));'
return runJavascript(project, javaHome, versionScript)
}
    /** Finds the full java version of the given JAVA_HOME */
private static String findJavaVersion(Project project, String javaHome) {
String versionScript = 'print(java.lang.System.getProperty("java.version"));'
return runJavascript(project, javaHome, versionScript)
}
/** Runs the given javascript using jjs from the jdk, and returns the output */
private static String runJavascript(Project project, String javaHome, String script) {
File tmpScript = File.createTempFile('es-gradle-tmp', '.js')
tmpScript.setText(script, 'UTF-8')
ByteArrayOutputStream output = new ByteArrayOutputStream()
ExecResult result = project.exec {
executable = new File(javaHome, 'bin/jjs')
args tmpScript.toString()
standardOutput = output
errorOutput = new ByteArrayOutputStream()
ignoreExitValue = true // we do not fail so we can first cleanup the tmp file
}
java.nio.file.Files.delete(tmpScript.toPath())
result.assertNormalExitValue()
return output.toString('UTF-8').trim()
}
/** Return the configuration name used for finding transitive deps of the given dependency. */
private static String transitiveDepConfigName(String groupId, String artifactId, String version) {
return "_transitive_${groupId}:${artifactId}:${version}"
}
/**
* Makes dependencies non-transitive.
*
* Gradle allows setting all dependencies as non-transitive very easily.
* Sadly this mechanism does not translate into maven pom generation. In order
* to effectively make the pom act as if it has no transitive dependencies,
* we must exclude each transitive dependency of each direct dependency.
*
* Determining the transitive deps of a dependency which has been resolved as
* non-transitive is difficult because the process of resolving removes the
* transitive deps. To sidestep this issue, we create a configuration per
* direct dependency version. This specially named and unique configuration
* will contain all of the transitive dependencies of this particular
* dependency. We can then use this configuration during pom generation
* to iterate the transitive dependencies and add excludes.
*/
static void configureConfigurations(Project project) {
// fail on any conflicting dependency versions
project.configurations.all({ Configuration configuration ->
if (configuration.name.startsWith('_transitive_')) {
                // skip the conflict check for _transitive_ configurations; they exist
                // only so we can discover *what* transitive deps are present
return
}
configuration.resolutionStrategy.failOnVersionConflict()
})
// force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself
Closure disableTransitiveDeps = { ModuleDependency dep ->
if (!(dep instanceof ProjectDependency) && dep.getGroup() != 'org.elasticsearch') {
dep.transitive = false
// also create a configuration just for this dependency version, so that later
// we can determine which transitive dependencies it has
String depConfig = transitiveDepConfigName(dep.group, dep.name, dep.version)
if (project.configurations.findByName(depConfig) == null) {
project.configurations.create(depConfig)
project.dependencies.add(depConfig, "${dep.group}:${dep.name}:${dep.version}")
}
}
}
project.configurations.compile.dependencies.all(disableTransitiveDeps)
project.configurations.testCompile.dependencies.all(disableTransitiveDeps)
project.configurations.provided.dependencies.all(disableTransitiveDeps)
// add exclusions to the pom directly, for each of the transitive deps of this project's deps
project.modifyPom { MavenPom pom ->
pom.withXml { XmlProvider xml ->
// first find if we have dependencies at all, and grab the node
NodeList depsNodes = xml.asNode().get('dependencies')
if (depsNodes.isEmpty()) {
return
}
// check each dependency for any transitive deps
for (Node depNode : depsNodes.get(0).children()) {
String groupId = depNode.get('groupId').get(0).text()
String artifactId = depNode.get('artifactId').get(0).text()
String version = depNode.get('version').get(0).text()
// collect the transitive deps now that we know what this dependency is
String depConfig = transitiveDepConfigName(groupId, artifactId, version)
Configuration configuration = project.configurations.findByName(depConfig)
if (configuration == null) {
continue // we did not make this dep non-transitive
}
Set<ResolvedArtifact> artifacts = configuration.resolvedConfiguration.resolvedArtifacts
if (artifacts.size() <= 1) {
// this dep has no transitive deps (or the only artifact is itself)
continue
}
// we now know we have something to exclude, so add the exclusion elements
Node exclusions = depNode.appendNode('exclusions')
for (ResolvedArtifact transitiveArtifact : artifacts) {
ModuleVersionIdentifier transitiveDep = transitiveArtifact.moduleVersion.id
if (transitiveDep.group == groupId && transitiveDep.name == artifactId) {
continue; // don't exclude the dependency itself!
}
Node exclusion = exclusions.appendNode('exclusion')
exclusion.appendNode('groupId', transitiveDep.group)
exclusion.appendNode('artifactId', transitiveDep.name)
}
}
}
}
}
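    /* For illustration (a hypothetical dependency): if the generated pom contains
     *   <dependency>
     *     <groupId>com.example</groupId>
     *     <artifactId>foo</artifactId>
     *     <version>1.0</version>
     *   </dependency>
     * and the matching _transitive_ configuration resolved com.example:bar, the
     * rewrite above appends
     *   <exclusions>
     *     <exclusion>
     *       <groupId>com.example</groupId>
     *       <artifactId>bar</artifactId>
     *     </exclusion>
     *   </exclusions>
     * to that dependency element, so consumers of the pom see no transitive deps. */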
    /** Adds repositories used by ES dependencies */
static void configureRepositories(Project project) {
RepositoryHandler repos = project.repositories
repos.mavenCentral()
repos.maven {
name 'sonatype-snapshots'
url 'http://oss.sonatype.org/content/repositories/snapshots/'
}
String luceneVersion = VersionProperties.lucene
if (luceneVersion.contains('-snapshot')) {
// extract the revision number from the version with a regex matcher
String revision = (luceneVersion =~ /\w+-snapshot-(\d+)/)[0][1]
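            // e.g. (illustrative) a lucene version '5.4.0-snapshot-1715952' yields revision '1715952'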
repos.maven {
name 'lucene-snapshots'
url "http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/${revision}"
}
}
}
/** Adds compiler settings to the project */
static void configureCompile(Project project) {
project.ext.compactProfile = 'compact3'
project.afterEvaluate {
// fail on all javac warnings
project.tasks.withType(JavaCompile) {
options.fork = true
options.forkOptions.executable = new File(project.javaHome, 'bin/javac')
options.forkOptions.memoryMaximumSize = "1g"
/*
* -path because gradle will send in paths that don't always exist.
* -missing because we have tons of missing @returns and @param.
*/
// don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :)
options.compilerArgs << '-Werror' << '-Xlint:all,-path' << '-Xdoclint:all' << '-Xdoclint:-missing'
// compile with compact 3 profile by default
// NOTE: this is just a compile time check: does not replace testing with a compact3 JRE
if (project.compactProfile != 'full') {
options.compilerArgs << '-profile' << project.compactProfile
}
options.encoding = 'UTF-8'
}
}
}
/** Adds additional manifest info to jars */
static void configureJarManifest(Project project) {
project.tasks.withType(Jar) { Jar jarTask ->
jarTask.doFirst {
// this doFirst is added before the info plugin, therefore it will run
// after the doFirst added by the info plugin, and we can override attributes
jarTask.manifest.attributes(
'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch,
'X-Compile-Lucene-Version': VersionProperties.lucene,
'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
'Build-Java-Version': project.javaVersion)
if (jarTask.manifest.attributes.containsKey('Change') == false) {
logger.warn('Building without git revision id.')
jarTask.manifest.attributes('Change': 'N/A')
}
}
}
}
/** Returns a closure of common configuration shared by unit and integration tests. */
static Closure commonTestConfig(Project project) {
return {
jvm "${project.javaHome}/bin/java"
parallelism System.getProperty('tests.jvms', 'auto')
ifNoTests 'fail'
leaveTemporary true
// TODO: why are we not passing maxmemory to junit4?
jvmArg '-Xmx' + System.getProperty('tests.heap.size', '512m')
jvmArg '-Xms' + System.getProperty('tests.heap.size', '512m')
if (JavaVersion.current().isJava7()) {
// some tests need a large permgen, but that only exists on java 7
jvmArg '-XX:MaxPermSize=128m'
}
jvmArg '-XX:MaxDirectMemorySize=512m'
jvmArg '-XX:+HeapDumpOnOutOfMemoryError'
File heapdumpDir = new File(project.buildDir, 'heapdump')
heapdumpDir.mkdirs()
jvmArg '-XX:HeapDumpPath=' + heapdumpDir
argLine System.getProperty('tests.jvm.argline')
// we use './temp' since this is per JVM and tests are forbidden from writing to CWD
systemProperty 'java.io.tmpdir', './temp'
systemProperty 'java.awt.headless', 'true'
systemProperty 'tests.maven', 'true' // TODO: rename this once we've switched to gradle!
systemProperty 'tests.artifact', project.name
systemProperty 'tests.task', path
systemProperty 'tests.security.manager', 'true'
// default test sysprop values
systemProperty 'tests.ifNoTests', 'fail'
systemProperty 'es.logger.level', 'WARN'
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('tests.') ||
property.getKey().startsWith('es.')) {
systemProperty property.getKey(), property.getValue()
}
}
// System assertions (-esa) are disabled for now because of what looks like a
// JDK bug triggered by Groovy on JDK7. We should look at re-enabling system
// assertions when we upgrade to a new version of Groovy (currently 2.4.4) or
// require JDK8. See https://issues.apache.org/jira/browse/GROOVY-7528.
enableSystemAssertions false
testLogging {
showNumFailuresAtEnd 25
slowTests {
heartbeat 10
summarySize 5
}
stackTraceFilters {
// custom filters: we carefully only omit test infra noise here
contains '.SlaveMain.'
regex(/^(\s+at )(org\.junit\.)/)
// also includes anonymous classes inside these two:
regex(/^(\s+at )(com\.carrotsearch\.randomizedtesting\.RandomizedRunner)/)
regex(/^(\s+at )(com\.carrotsearch\.randomizedtesting\.ThreadLeakControl)/)
regex(/^(\s+at )(com\.carrotsearch\.randomizedtesting\.rules\.)/)
regex(/^(\s+at )(org\.apache\.lucene\.util\.TestRule)/)
regex(/^(\s+at )(org\.apache\.lucene\.util\.AbstractBeforeAfterRule)/)
}
if (System.getProperty('tests.class') != null && System.getProperty('tests.output') == null) {
// if you are debugging, you want to see the output!
outputMode 'always'
} else {
outputMode System.getProperty('tests.output', 'onerror')
}
}
balancers {
executionTime cacheFilename: ".local-${project.version}-${name}-execution-times.log"
}
listeners {
junitReport()
}
exclude '**/*$*.class'
}
}
/** Configures the test task */
static Task configureTest(Project project) {
Task test = project.tasks.getByName('test')
test.configure(commonTestConfig(project))
test.configure {
include '**/*Tests.class'
}
return test
}
private static configurePrecommit(Project project) {
Task precommit = PrecommitTasks.create(project, true)
project.check.dependsOn(precommit)
project.test.mustRunAfter(precommit)
project.dependencyLicenses.dependencies = project.configurations.runtime - project.configurations.provided
}
}

View File

@ -16,26 +16,33 @@
 * specific language governing permissions and limitations
 * under the License.
 */

(removed in this commit:)

package org.elasticsearch.index.settings;

import org.elasticsearch.common.inject.BindingAnnotation;

import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;

import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

@BindingAnnotation
@Target({FIELD, PARAMETER})
@Retention(RUNTIME)
@Documented
public @interface IndexSettings {
}

(added in this commit:)

package org.elasticsearch.gradle

import org.gradle.api.DefaultTask
import org.gradle.api.tasks.*
import org.gradle.internal.nativeintegration.filesystem.Chmod

import java.io.File
import javax.inject.Inject

/**
 * Creates an empty directory.
 */
class EmptyDirTask extends DefaultTask {
    @Input
    Object dir

    @Input
    int dirMode = 0755

    @TaskAction
    void create() {
        dir = dir as File
        dir.mkdirs()
        getChmod().chmod(dir, dirMode)
    }

    @Inject
    Chmod getChmod() {
        throw new UnsupportedOperationException()
    }
}

View File

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.*
import java.io.File
/**
 * Creates a file and sets its contents to something.
*/
class FileContentsTask extends DefaultTask {
/**
* The file to be built. Must be of type File to make @OutputFile happy.
*/
@OutputFile
File file
@Input
Object contents
/**
 * Sets the file to be built. Takes any object and coerces it to a File.
*/
void setFile(Object file) {
this.file = file as File
}
@TaskAction
void setContents() {
file = file as File
file.text = contents.toString()
}
}

View File

@ -17,26 +17,26 @@
 * under the License.
 */

(removed in this commit:)

package org.apache.lucene.queryparser.classic;

import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.MissingQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;

public class MissingFieldQueryExtension implements FieldQueryExtension {

    public static final String NAME = "_missing_";

    @Override
    public Query query(QueryShardContext context, String queryText) {
        Query query = MissingQueryBuilder.newFilter(context, queryText,
                MissingQueryBuilder.DEFAULT_EXISTENCE_VALUE, MissingQueryBuilder.DEFAULT_NULL_VALUE);
        if (query != null) {
            return new ConstantScoreQuery(query);
        }
        return null;
    }
}

(added in this commit:)

package org.elasticsearch.gradle

import org.gradle.api.GradleException
import org.gradle.api.tasks.Exec

/**
 * A wrapper around gradle's Exec task to capture output and log on error.
 */
class LoggedExec extends Exec {
    LoggedExec() {
        if (logger.isInfoEnabled() == false) {
            standardOutput = new ByteArrayOutputStream()
            errorOutput = standardOutput
            ignoreExitValue = true
            doLast {
                if (execResult.exitValue != 0) {
                    standardOutput.toString('UTF-8').eachLine { line -> logger.error(line) }
                    throw new GradleException("Process '${executable} ${args.join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
                }
            }
        }
    }
}

View File

@ -0,0 +1,45 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
import org.apache.tools.ant.filters.ReplaceTokens
import org.gradle.api.file.CopySpec
/**
 * Gradle provides "expansion" functionality using groovy's SimpleTemplateEngine.
 * However, it allows substitutions of the form {@code $foo} (no curlies). Rest tests
 * rely on that form for substitutions provided by the test runner, so it must be left alone.
*
* This class provides a helper to do maven filtering, where only the form {@code $\{foo\}} is supported.
*
* TODO: we should get rid of this hack, and make the rest tests use some other identifier
* for builtin vars
*/
class MavenFilteringHack {
/**
* Adds a filter to the given copy spec that will substitute maven variables.
 * @param copySpec the copy spec to add the filter to
 * @param substitutions map of variable names to replacement values
*/
static void filter(CopySpec copySpec, Map substitutions) {
Map mavenSubstitutions = substitutions.collectEntries() {
key, value -> ["{${key}".toString(), value.toString()]
}
copySpec.filter(ReplaceTokens, tokens: mavenSubstitutions, beginToken: '$', endToken: '}')
}
}
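A minimal usage sketch (names and values hypothetical): given template files
containing ${version}, the helper substitutes the maven-style form while leaving
bare $foo references for the rest test runner untouched:

    def spec = project.copySpec {
        from 'src/main/packaging'
    }
    MavenFilteringHack.filter(spec, [version: '3.0.0-SNAPSHOT'])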

View File

@ -0,0 +1,41 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
/**
* Accessor for shared dependency versions used by elasticsearch, namely the elasticsearch and lucene versions.
*/
class VersionProperties {
static final String elasticsearch
static final String lucene
static final Map<String, String> versions = new HashMap<>()
static {
Properties props = new Properties()
InputStream propsStream = VersionProperties.class.getResourceAsStream('/version.properties')
if (propsStream == null) {
throw new RuntimeException('/version.properties resource missing')
}
props.load(propsStream)
elasticsearch = props.getProperty('elasticsearch')
lucene = props.getProperty('lucene')
for (String property : props.stringPropertyNames()) {
versions.put(property, props.getProperty(property))
}
}
}
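A usage sketch (coordinates illustrative): the static accessors can be referenced
anywhere in the build, e.g. to pin a dependency to the shared lucene version:

    dependencies {
        compile "org.apache.lucene:lucene-core:${VersionProperties.lucene}"
    }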

View File

@ -0,0 +1,124 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.bundling.Zip
/**
* Encapsulates build configuration for an Elasticsearch plugin.
*/
public class PluginBuildPlugin extends BuildPlugin {
@Override
public void apply(Project project) {
super.apply(project)
configureDependencies(project)
// this afterEvaluate must happen before the afterEvaluate added by integTest creation,
        // so that the file name resolution for installing the plugin is set up before it runs
project.afterEvaluate {
String name = project.pluginProperties.extension.name
project.jar.baseName = name
project.bundlePlugin.baseName = name
project.integTest.dependsOn(project.bundlePlugin)
project.tasks.run.dependsOn(project.bundlePlugin)
if (project.path.startsWith(':modules:')) {
project.integTest.clusterConfig.module(project)
project.tasks.run.clusterConfig.module(project)
} else {
project.integTest.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
project.tasks.run.clusterConfig.plugin(name, project.bundlePlugin.outputs.files)
}
}
createIntegTestTask(project)
createBundleTask(project)
project.tasks.create('run', RunTask) // allow running ES with this plugin in the foreground of a build
}
private static void configureDependencies(Project project) {
project.dependencies {
provided "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
testCompile "org.elasticsearch:test-framework:${project.versions.elasticsearch}"
// we "upgrade" these optional deps to provided for plugins, since they will run
// with a full elasticsearch server that includes optional deps
provided "com.spatial4j:spatial4j:${project.versions.spatial4j}"
provided "com.vividsolutions:jts:${project.versions.jts}"
provided "log4j:log4j:${project.versions.log4j}"
provided "log4j:apache-log4j-extras:${project.versions.log4j}"
provided "org.slf4j:slf4j-api:${project.versions.slf4j}"
provided "net.java.dev.jna:jna:${project.versions.jna}"
}
}
/** Adds an integTest task which runs rest tests */
private static void createIntegTestTask(Project project) {
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.mustRunAfter(project.precommit, project.test)
project.check.dependsOn(integTest)
}
/**
* Adds a bundlePlugin task which builds the zip containing the plugin jars,
* metadata, properties, and packaging files
*/
private static void createBundleTask(Project project) {
File pluginMetadata = project.file('src/main/plugin-metadata')
// create a task to build the properties file for this plugin
PluginPropertiesTask buildProperties = project.tasks.create('pluginProperties', PluginPropertiesTask.class)
// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
testSourceSet.output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
testSourceSet.resources.srcDir(pluginMetadata)
// create the actual bundle task, which zips up all the files for the plugin
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
from buildProperties // plugin properties file
from pluginMetadata // metadata (eg custom security policy)
from project.jar // this plugin's jar
from project.configurations.runtime - project.configurations.provided // the dep jars
// extra files for the plugin to go into the zip
from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
from('src/main') {
include 'config/**'
include 'bin/**'
}
from('src/site') {
include '_site/**'
}
}
project.assemble.dependsOn(bundle)
// remove jar from the archives (things that will be published), and set it to the zip
project.configurations.archives.artifacts.removeAll { it.archiveTask.is project.jar }
project.artifacts.add('archives', bundle)
// also make the zip the default artifact (used when depending on this project)
project.configurations.getByName('default').extendsFrom = []
project.artifacts.add('default', bundle)
}
}

View File

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.gradle.api.Project
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.Optional
/**
* A container for plugin properties that will be written to the plugin descriptor, for easy
* manipulation in the gradle DSL.
*/
class PluginPropertiesExtension {
@Input
String name
@Input
String version
@Input
String description
@Input
boolean jvm = true
@Input
String classname
@Input
boolean site = false
@Input
boolean isolated = true
PluginPropertiesExtension(Project project) {
name = project.name
version = project.version
}
}
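A minimal esplugin DSL sketch using this extension (names hypothetical); name,
description and classname are the settings validated as required by
PluginPropertiesTask below:

    esplugin {
        name 'my-plugin'
        description 'Adds frobnication support to Elasticsearch'
        classname 'org.example.plugin.MyPlugin'
    }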

View File

@ -0,0 +1,83 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.plugin
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
/**
* Creates a plugin descriptor.
*/
class PluginPropertiesTask extends Copy {
PluginPropertiesExtension extension
File generatedResourcesDir = new File(project.projectDir, 'generated-resources')
PluginPropertiesTask() {
File templateFile = new File(project.buildDir, 'templates/plugin-descriptor.properties')
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
doLast {
InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream('/plugin-descriptor.properties')
templateFile.parentFile.mkdirs()
templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
}
}
dependsOn(copyPluginPropertiesTemplate)
extension = project.extensions.create('esplugin', PluginPropertiesExtension, project)
project.clean.delete(generatedResourcesDir)
project.afterEvaluate {
            // check required properties are set
if (extension.name == null) {
throw new InvalidUserDataException('name is a required setting for esplugin')
}
if (extension.description == null) {
throw new InvalidUserDataException('description is a required setting for esplugin')
}
if (extension.jvm && extension.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin with jvm=true')
}
doFirst {
if (extension.jvm && extension.isolated == false) {
String warning = "WARNING: Disabling plugin isolation in ${project.path} is deprecated and will be removed in the future"
logger.warn("${'=' * warning.length()}\n${warning}\n${'=' * warning.length()}")
}
}
// configure property substitution
from(templateFile)
into(generatedResourcesDir)
expand(generateSubstitutions())
}
}
Map generateSubstitutions() {
return [
'name': extension.name,
'description': extension.description,
'version': extension.version,
'elasticsearchVersion': VersionProperties.elasticsearch,
'javaVersion': project.targetCompatibility as String,
'jvm': extension.jvm as String,
'site': extension.site as String,
'isolated': extension.isolated as String,
'classname': extension.jvm ? extension.classname : 'NA'
]
}
}

View File

@ -0,0 +1,231 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.precommit
import org.gradle.api.*
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputDirectory
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.TaskAction
import java.nio.file.Files
import java.security.MessageDigest
import java.util.regex.Matcher
import java.util.regex.Pattern
/**
* A task to check licenses for dependencies.
*
* There are two parts to the check:
* <ul>
* <li>LICENSE and NOTICE files</li>
* <li>SHA checksums for each dependency jar</li>
* </ul>
*
 * The directory to find the license and sha files in defaults to the dir {@code licenses}
* in the project directory for this task. You can override this directory:
* <pre>
* dependencyLicenses {
* licensesDir = project.file('mybetterlicensedir')
* }
* </pre>
*
* The jar files to check default to the dependencies from the default configuration. You
* can override this, for example, to only check compile dependencies:
* <pre>
* dependencyLicenses {
* dependencies = project.configurations.compile
* }
* </pre>
*
* Every jar must have a {@code .sha1} file in the licenses dir. These can be managed
* automatically using the {@code updateShas} helper task that is created along
* with this task. It will add {@code .sha1} files for new jars that are in dependencies
* and remove old {@code .sha1} files that are no longer needed.
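 * For example, to (re)generate the sha files:
 * <pre>
 * gradle updateShas
 * </pre>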
*
* Every jar must also have a LICENSE and NOTICE file. However, multiple jars can share
* LICENSE and NOTICE files by mapping a pattern to the same name.
* <pre>
* dependencyLicenses {
* mapping from: &#47;lucene-.*&#47;, to: 'lucene'
* }
* </pre>
*/
public class DependencyLicensesTask extends DefaultTask {
static final String SHA_EXTENSION = '.sha1'
// TODO: we should be able to default this to eg compile deps, but we need to move the licenses
// check from distribution to core (ie this should only be run on java projects)
/** A collection of jar files that should be checked. */
@InputFiles
public FileCollection dependencies
/** The directory to find the license and sha files in. */
@InputDirectory
public File licensesDir = new File(project.projectDir, 'licenses')
/** A map of patterns to prefix, used to find the LICENSE and NOTICE file. */
private LinkedHashMap<String, String> mappings = new LinkedHashMap<>()
/**
* Add a mapping from a regex pattern for the jar name, to a prefix to find
* the LICENSE and NOTICE file for that jar.
*/
@Input
public void mapping(Map<String, String> props) {
String from = props.remove('from')
if (from == null) {
throw new InvalidUserDataException('Missing "from" setting for license name mapping')
}
String to = props.remove('to')
if (to == null) {
throw new InvalidUserDataException('Missing "to" setting for license name mapping')
}
if (props.isEmpty() == false) {
throw new InvalidUserDataException("Unknown properties for mapping on dependencyLicenses: ${props.keySet()}")
}
mappings.put(from, to)
}
@TaskAction
public void checkDependencies() {
if (dependencies.isEmpty()) {
if (licensesDir.exists()) {
throw new GradleException("Licenses dir ${licensesDir} exists, but there are no dependencies")
}
return // no dependencies to check
} else if (licensesDir.exists() == false) {
throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies")
}
// order is the same for keys and values iteration since we use a linked hashmap
List<String> mapped = new ArrayList<>(mappings.values())
Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
Map<String, Integer> licenses = new HashMap<>()
Map<String, Integer> notices = new HashMap<>()
Set<File> shaFiles = new HashSet<File>()
licensesDir.eachFile {
String name = it.getName()
if (name.endsWith(SHA_EXTENSION)) {
shaFiles.add(it)
} else if (name.endsWith('-LICENSE') || name.endsWith('-LICENSE.txt')) {
// TODO: why do we support suffix of LICENSE *and* LICENSE.txt??
licenses.put(name, 0)
            } else if (name.endsWith('-NOTICE') || name.endsWith('-NOTICE.txt')) {
notices.put(name, 0)
}
}
for (File dependency : dependencies) {
String jarName = dependency.getName()
logger.info("Checking license/notice/sha for " + jarName)
checkSha(dependency, jarName, shaFiles)
String name = jarName - ~/\-\d+.*/
Matcher match = mappingsPattern.matcher(name)
if (match.matches()) {
int i = 0
while (i < match.groupCount() && match.group(i + 1) == null) ++i;
logger.info("Mapped dependency name ${name} to ${mapped.get(i)} for license check")
name = mapped.get(i)
}
checkFile(name, jarName, licenses, 'LICENSE')
checkFile(name, jarName, notices, 'NOTICE')
}
licenses.each { license, count ->
if (count == 0) {
throw new GradleException("Unused license ${license}")
}
}
notices.each { notice, count ->
if (count == 0) {
throw new GradleException("Unused notice ${notice}")
}
}
if (shaFiles.isEmpty() == false) {
throw new GradleException("Unused sha files found: \n${shaFiles.join('\n')}")
}
}
private void checkSha(File jar, String jarName, Set<File> shaFiles) {
File shaFile = new File(licensesDir, jarName + SHA_EXTENSION)
if (shaFile.exists() == false) {
throw new GradleException("Missing SHA for ${jarName}. Run 'gradle updateSHAs' to create")
}
// TODO: shouldn't have to trim, sha files should not have trailing newline
String expectedSha = shaFile.getText('UTF-8').trim()
String sha = MessageDigest.getInstance("SHA-1").digest(jar.getBytes()).encodeHex().toString()
if (expectedSha.equals(sha) == false) {
throw new GradleException("SHA has changed! Expected ${expectedSha} for ${jarName} but got ${sha}. " +
"\nThis usually indicates a corrupt dependency cache or artifacts changed upstream." +
"\nEither wipe your cache, fix the upstream artifact, or delete ${shaFile} and run updateShas")
}
shaFiles.remove(shaFile)
}
private void checkFile(String name, String jarName, Map<String, Integer> counters, String type) {
String fileName = "${name}-${type}"
        Integer count = counters.get(fileName)
        if (count == null) {
            // try the other suffix...TODO: get rid of this, just support ending in .txt
            fileName = "${fileName}.txt"
            count = counters.get(fileName)
        }
if (count == null) {
throw new GradleException("Missing ${type} for ${jarName}, expected in ${fileName}")
}
counters.put(fileName, count + 1)
}
/** A helper task to update the sha files in the license dir. */
public static class UpdateShasTask extends DefaultTask {
private DependencyLicensesTask parentTask
@TaskAction
public void updateShas() {
Set<File> shaFiles = new HashSet<File>()
parentTask.licensesDir.eachFile {
String name = it.getName()
if (name.endsWith(SHA_EXTENSION)) {
shaFiles.add(it)
}
}
for (File dependency : parentTask.dependencies) {
String jarName = dependency.getName()
File shaFile = new File(parentTask.licensesDir, jarName + SHA_EXTENSION)
if (shaFile.exists() == false) {
logger.lifecycle("Adding sha for ${jarName}")
String sha = MessageDigest.getInstance("SHA-1").digest(dependency.getBytes()).encodeHex().toString()
shaFile.setText(sha, 'UTF-8')
} else {
shaFiles.remove(shaFile)
}
}
shaFiles.each { shaFile ->
logger.lifecycle("Removing unused sha ${shaFile.getName()}")
Files.delete(shaFile.toPath())
}
}
}
}

View File

@ -0,0 +1,127 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.precommit
import org.gradle.api.DefaultTask
import org.gradle.api.GradleException
import org.gradle.api.InvalidUserDataException
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.TaskAction
import org.gradle.api.tasks.util.PatternFilterable
import org.gradle.api.tasks.util.PatternSet
import java.util.regex.Pattern
/**
 * Checks source files in the project for forbidden patterns.
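 *
 * Additional rules and file exclusions can be configured from a build script; the
 * rule name and pattern below are illustrative, not built in:
 * <pre>
 * forbiddenPatterns {
 *   exclude '**&#47;*.json'
 *   rule name: 'sysout', pattern: /System\.out\.println/
 * }
 * </pre>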
*/
public class ForbiddenPatternsTask extends DefaultTask {
/** The rules: a map from the rule name, to a rule regex pattern. */
private Map<String,String> patterns = new LinkedHashMap<>()
/** A pattern set of which files should be checked. */
private PatternFilterable filesFilter = new PatternSet()
@OutputFile
File outputMarker = new File(project.buildDir, "markers/forbiddenPatterns")
public ForbiddenPatternsTask() {
description = 'Checks source files for invalid patterns like nocommits or tabs'
// we always include all source files, and exclude what should not be checked
filesFilter.include('**')
// exclude known binary extensions
filesFilter.exclude('**/*.gz')
filesFilter.exclude('**/*.ico')
filesFilter.exclude('**/*.jar')
filesFilter.exclude('**/*.zip')
filesFilter.exclude('**/*.jks')
filesFilter.exclude('**/*.crt')
filesFilter.exclude('**/*.png')
// add mandatory rules
patterns.put('nocommit', /nocommit/)
patterns.put('tab', /\t/)
inputs.property("excludes", filesFilter.excludes)
inputs.property("rules", patterns)
}
/** Adds a file glob pattern to be excluded */
public void exclude(String... excludes) {
filesFilter.exclude(excludes)
}
    /** Adds a pattern to forbid. */
void rule(Map<String,String> props) {
String name = props.remove('name')
if (name == null) {
throw new InvalidUserDataException('Missing [name] for invalid pattern rule')
}
String pattern = props.remove('pattern')
if (pattern == null) {
throw new InvalidUserDataException('Missing [pattern] for invalid pattern rule')
}
if (props.isEmpty() == false) {
throw new InvalidUserDataException("Unknown arguments for ForbiddenPatterns rule mapping: ${props.keySet()}")
}
// TODO: fail if pattern contains a newline, it won't work (currently)
patterns.put(name, pattern)
}
/** Returns the files this task will check */
@InputFiles
FileCollection files() {
List<FileCollection> collections = new ArrayList<>()
for (SourceSet sourceSet : project.sourceSets) {
collections.add(sourceSet.allSource.matching(filesFilter))
}
return project.files(collections.toArray())
}
@TaskAction
void checkInvalidPatterns() {
Pattern allPatterns = Pattern.compile('(' + patterns.values().join(')|(') + ')')
List<String> failures = new ArrayList<>()
for (File f : files()) {
f.eachLine('UTF-8') { String line, int lineNumber ->
if (allPatterns.matcher(line).find()) {
addErrorMessages(failures, f, line, lineNumber)
}
}
}
if (failures.isEmpty() == false) {
throw new GradleException('Found invalid patterns:\n' + failures.join('\n'))
}
outputMarker.setText('done', 'UTF-8')
}
// iterate through patterns to find the right ones for nice error messages
void addErrorMessages(List<String> failures, File f, String line, int lineNumber) {
String path = project.getRootProject().projectDir.toURI().relativize(f.toURI()).toString()
for (Map.Entry<String,String> pattern : patterns.entrySet()) {
if (Pattern.compile(pattern.value).matcher(line).find()) {
failures.add('- ' + pattern.key + ' on line ' + lineNumber + ' of ' + path)
}
}
}
}

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.precommit
import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile
/**
* Runs CheckJarHell on a classpath.
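 *
 * Conceptually this is equivalent to running something like the following by hand
 * (the classpath placeholder is illustrative):
 * <pre>
 * java -cp &lt;test runtime classpath&gt; org.elasticsearch.bootstrap.JarHell
 * </pre>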
*/
public class JarHellTask extends LoggedExec {
/**
* We use a simple "marker" file that we touch when the task succeeds
* as the task output. This is compared against the modified time of the
* inputs (ie the jars/class files).
*/
@OutputFile
public File successMarker = new File(project.buildDir, 'markers/jarHell')
/** The classpath to run jarhell check on, defaults to the test runtime classpath */
    @InputFiles
public FileCollection classpath = project.sourceSets.test.runtimeClasspath
public JarHellTask() {
project.afterEvaluate {
dependsOn(classpath)
description = "Runs CheckJarHell on ${classpath}"
executable = new File(project.javaHome, 'bin/java')
doFirst({
/* JarHell doesn't like getting directories that don't exist but
               gradle isn't especially careful about that. So we have to
               filter it ourselves. */
FileCollection taskClasspath = classpath.filter { it.exists() }
args('-cp', taskClasspath.asPath, 'org.elasticsearch.bootstrap.JarHell')
})
doLast({
successMarker.parentFile.mkdirs()
successMarker.setText("", 'UTF-8')
})
}
}
}

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.precommit
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.plugins.JavaBasePlugin
/**
* Validation tasks which should be run before committing. These run before tests.
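 *
 * A build plugin would typically wire the returned task up as (illustrative):
 * <pre>
 * project.check.dependsOn(PrecommitTasks.create(project, true))
 * </pre>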
*/
class PrecommitTasks {
/** Adds a precommit task, which depends on non-test verification tasks. */
public static Task create(Project project, boolean includeDependencyLicenses) {
List<Task> precommitTasks = [
configureForbiddenApis(project),
project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
project.tasks.create('jarHell', JarHellTask.class)]
// tasks with just tests don't need dependency licenses, so this flag makes adding
// the task optional
if (includeDependencyLicenses) {
DependencyLicensesTask dependencyLicenses = project.tasks.create('dependencyLicenses', DependencyLicensesTask.class)
precommitTasks.add(dependencyLicenses)
// we also create the updateShas helper task that is associated with dependencyLicenses
UpdateShasTask updateShas = project.tasks.create('updateShas', UpdateShasTask.class)
updateShas.parentTask = dependencyLicenses
}
Map<String, Object> precommitOptions = [
name: 'precommit',
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Runs all non-test checks.',
dependsOn: precommitTasks
]
return project.tasks.create(precommitOptions)
}
private static Task configureForbiddenApis(Project project) {
project.pluginManager.apply(ForbiddenApisPlugin.class)
project.forbiddenApis {
internalRuntimeForbidden = true
failOnUnsupportedJava = false
bundledSignatures = ['jdk-unsafe', 'jdk-deprecated']
signaturesURLs = [getClass().getResource('/forbidden/all-signatures.txt')]
suppressAnnotations = ['**.SuppressForbidden']
}
Task mainForbidden = project.tasks.findByName('forbiddenApisMain')
if (mainForbidden != null) {
mainForbidden.configure {
bundledSignatures += 'jdk-system-out'
signaturesURLs += getClass().getResource('/forbidden/core-signatures.txt')
}
}
Task testForbidden = project.tasks.findByName('forbiddenApisTest')
if (testForbidden != null) {
testForbidden.configure {
signaturesURLs += getClass().getResource('/forbidden/test-signatures.txt')
}
}
Task forbiddenApis = project.tasks.findByName('forbiddenApis')
forbiddenApis.group = "" // clear group, so this does not show up under verification tasks
return forbiddenApis
}
}

View File

@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.precommit
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.TaskAction
import java.nio.file.Files
import java.security.MessageDigest
/**
* A task to update shas used by {@code DependencyLicensesCheck}
*/
public class UpdateShasTask extends DefaultTask {
/** The parent dependency licenses task to use configuration from */
public DependencyLicensesTask parentTask
public UpdateShasTask() {
description = 'Updates the sha files for the dependencyLicenses check'
onlyIf { parentTask.licensesDir.exists() }
}
@TaskAction
public void updateShas() {
Set<File> shaFiles = new HashSet<File>()
parentTask.licensesDir.eachFile {
String name = it.getName()
if (name.endsWith(DependencyLicensesTask.SHA_EXTENSION)) {
shaFiles.add(it)
}
}
for (File dependency : parentTask.dependencies) {
String jarName = dependency.getName()
File shaFile = new File(parentTask.licensesDir, jarName + DependencyLicensesTask.SHA_EXTENSION)
if (shaFile.exists() == false) {
logger.lifecycle("Adding sha for ${jarName}")
String sha = MessageDigest.getInstance("SHA-1").digest(dependency.getBytes()).encodeHex().toString()
shaFile.setText(sha, 'UTF-8')
} else {
shaFiles.remove(shaFile)
}
}
shaFiles.each { shaFile ->
logger.lifecycle("Removing unused sha ${shaFile.getName()}")
Files.delete(shaFile.toPath())
}
}
}

View File

@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.gradle.api.GradleException
import org.gradle.api.Project
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Input
/** Configuration for an elasticsearch cluster, used for integration tests. */
class ClusterConfiguration {
@Input
String distribution = 'integ-test-zip'
@Input
int numNodes = 1
@Input
int httpPort = 0
@Input
int transportPort = 0
@Input
boolean daemonize = true
@Input
boolean debug = false
@Input
String jvmArgs = System.getProperty('tests.jvm.argline', '')
/**
* A closure to call before the cluster is considered ready. The closure is passed the node info,
* as well as a groovy AntBuilder, to enable running ant condition checks. The default wait
* condition is for http on the http port.
*/
@Input
Closure waitCondition = { NodeInfo node, AntBuilder ant ->
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}",
dest: tmpFile.toString(),
ignoreerrors: true, // do not fail on error, so logging buffers can be flushed by the wait task
retries: 10)
return tmpFile.exists()
}
Map<String, String> systemProperties = new HashMap<>()
Map<String, String> settings = new HashMap<>()
// map from destination path, to source file
Map<String, Object> extraConfigFiles = new HashMap<>()
LinkedHashMap<String, Object> plugins = new LinkedHashMap<>()
List<Project> modules = new ArrayList<>()
LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()
@Input
void systemProperty(String property, String value) {
systemProperties.put(property, value)
}
@Input
void setting(String name, String value) {
settings.put(name, value)
}
@Input
void plugin(String name, FileCollection file) {
plugins.put(name, file)
}
@Input
void plugin(String name, Project pluginProject) {
plugins.put(name, pluginProject)
}
/** Add a module to the cluster. The project must be an esplugin and have a single zip default artifact. */
@Input
void module(Project moduleProject) {
modules.add(moduleProject)
}
@Input
void setupCommand(String name, Object... args) {
setupCommands.put(name, args)
}
/**
* Add an extra configuration file. The path is relative to the config dir, and the sourceFile
* is anything accepted by project.file()
*/
@Input
void extraConfigFile(String path, Object sourceFile) {
if (path == 'elasticsearch.yml') {
throw new GradleException('Overwriting elasticsearch.yml is not allowed, add additional settings using cluster { setting "foo", "bar" }')
}
extraConfigFiles.put(path, sourceFile)
}
}
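
// Illustrative build-script usage (the task and plugin names here are assumptions,
// not defined in this file):
//
//   integTest {
//       cluster {
//           numNodes = 2
//           setting 'node.testattr', 'test'
//           systemProperty 'es.logger.level', 'DEBUG'
//           plugin 'my-plugin', project(':plugins:my-plugin')
//       }
//   }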

View File

@ -0,0 +1,565 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.api.*
import org.gradle.api.artifacts.Configuration
import org.gradle.api.file.FileCollection
import org.gradle.api.logging.Logger
import org.gradle.api.tasks.*
import java.nio.file.Paths
/**
* A helper for creating tasks to build a cluster that is used by a task, and tear down the cluster when the task is finished.
*/
class ClusterFormationTasks {
/**
* Adds dependent tasks to the given task to start and stop a cluster with the given configuration.
*
* Returns an object that will resolve at execution time of the given task to a uri for the cluster.
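     *
     * A test task typically passes the returned object into a system property, so the
     * uri is resolved only at execution time, e.g. (illustrative, from within a test task):
     * <pre>
     * Object clusterUri = ClusterFormationTasks.setup(project, this, clusterConfig)
     * systemProperty('tests.cluster', clusterUri)
     * </pre>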
*/
static Object setup(Project project, Task task, ClusterConfiguration config) {
if (task.getEnabled() == false) {
// no need to add cluster formation tasks if the task won't run!
return
}
configureDistributionDependency(project, config.distribution)
List<Task> startTasks = []
List<NodeInfo> nodes = []
for (int i = 0; i < config.numNodes; ++i) {
NodeInfo node = new NodeInfo(config, i, project, task)
nodes.add(node)
startTasks.add(configureNode(project, task, node))
}
Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks)
task.dependsOn(wait)
        // delay the resolution of the uri by wrapping in a closure, so it is not resolved until the tests read it
return "${-> nodes[0].transportUri()}"
}
/** Adds a dependency on the given distribution */
static void configureDistributionDependency(Project project, String distro) {
String elasticsearchVersion = VersionProperties.elasticsearch
String packaging = distro
if (distro == 'tar') {
packaging = 'tar.gz'
} else if (distro == 'integ-test-zip') {
packaging = 'zip'
}
project.configurations {
elasticsearchDistro
}
project.dependencies {
elasticsearchDistro "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}"
}
}
/**
* Adds dependent tasks to start an elasticsearch cluster before the given task is executed,
* and stop it after it has finished executing.
*
* The setup of the cluster involves the following:
* <ol>
* <li>Cleanup the extraction directory</li>
* <li>Extract a fresh copy of elasticsearch</li>
* <li>Write an elasticsearch.yml config file</li>
* <li>Copy plugins that will be installed to a temporary dir (which contains spaces)</li>
* <li>Install plugins</li>
* <li>Run additional setup commands</li>
     *   <li>Start elasticsearch</li>
* </ol>
*
* @return a task which starts the node.
*/
static Task configureNode(Project project, Task task, NodeInfo node) {
// tasks are chained so their execution order is maintained
Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: task.dependsOn.collect()) {
delete node.homeDir
delete node.cwd
doLast {
node.cwd.mkdirs()
}
}
setup = configureCheckPreviousTask(taskName(task, node, 'checkPrevious'), project, setup, node)
setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node)
setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node)
setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node)
setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
// install modules
for (Project module : node.config.modules) {
String actionName = pluginTaskName('install', module.name, 'Module')
setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module)
}
// install plugins
for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')
setup = configureInstallPluginTask(taskName(task, node, actionName), project, setup, node, plugin.getValue())
}
// extra setup commands
for (Map.Entry<String, Object[]> command : node.config.setupCommands.entrySet()) {
// the first argument is the actual script name, relative to home
Object[] args = command.getValue().clone()
args[0] = new File(node.homeDir, args[0].toString())
setup = configureExecTask(taskName(task, node, command.getKey()), project, setup, node, args)
}
Task start = configureStartTask(taskName(task, node, 'start'), project, setup, node)
if (node.config.daemonize) {
// if we are running in the background, make sure to stop the server when the task completes
Task stop = configureStopTask(taskName(task, node, 'stop'), project, [], node)
task.finalizedBy(stop)
}
return start
}
/** Adds a task to extract the elasticsearch distribution */
static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node) {
List extractDependsOn = [project.configurations.elasticsearchDistro, setup]
/* project.configurations.elasticsearchDistro.singleFile will be an
external artifact if this is being run by a plugin not living in the
elasticsearch source tree. If this is a plugin built in the
elasticsearch source tree or this is a distro in the elasticsearch
source tree then this should be the version of elasticsearch built
by the source tree. If it isn't then Bad Things(TM) will happen. */
Task extract
switch (node.config.distribution) {
case 'integ-test-zip':
case 'zip':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
into node.baseDir
}
break;
case 'tar':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from {
project.tarTree(project.resources.gzip(project.configurations.elasticsearchDistro.singleFile))
}
into node.baseDir
}
break;
case 'rpm':
File rpmDatabase = new File(node.baseDir, 'rpm-database')
File rpmExtracted = new File(node.baseDir, 'rpm-extracted')
/* Delay reading the location of the rpm file until task execution */
Object rpm = "${ -> project.configurations.elasticsearchDistro.singleFile}"
extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) {
commandLine 'rpm', '--badreloc', '--nodeps', '--noscripts', '--notriggers',
'--dbpath', rpmDatabase,
'--relocate', "/=${rpmExtracted}",
'-i', rpm
doFirst {
rpmDatabase.deleteDir()
rpmExtracted.deleteDir()
}
}
break;
case 'deb':
/* Delay reading the location of the deb file until task execution */
File debExtracted = new File(node.baseDir, 'deb-extracted')
Object deb = "${ -> project.configurations.elasticsearchDistro.singleFile}"
extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) {
commandLine 'dpkg-deb', '-x', deb, debExtracted
doFirst {
debExtracted.deleteDir()
}
}
break;
default:
throw new InvalidUserDataException("Unknown distribution: ${node.config.distribution}")
}
return extract
}
/** Adds a task to write elasticsearch.yml for the given node configuration */
static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node) {
Map esConfig = [
'cluster.name' : node.clusterName,
'pidfile' : node.pidFile,
'path.repo' : "${node.homeDir}/repo",
'path.shared_data' : "${node.homeDir}/../",
// Define a node attribute so we can test that it exists
'node.testattr' : 'test',
'repositories.url.allowed_urls': 'http://snapshot.test*'
]
if (node.config.numNodes == 1) {
esConfig['http.port'] = node.config.httpPort
esConfig['transport.tcp.port'] = node.config.transportPort
} else {
            // TODO: fix multi node so it doesn't use hardcoded ports
esConfig['http.port'] = 9400 + node.nodeNum
esConfig['transport.tcp.port'] = 9500 + node.nodeNum
esConfig['discovery.zen.ping.unicast.hosts'] = (0..<node.config.numNodes).collect{"localhost:${9500 + it}"}.join(',')
}
esConfig.putAll(node.config.settings)
Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
writeConfig.doFirst {
File configFile = new File(node.confDir, 'elasticsearch.yml')
logger.info("Configuring ${configFile}")
configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8')
        }
        return writeConfig
    }
static Task configureExtraConfigFilesTask(String name, Project project, Task setup, NodeInfo node) {
if (node.config.extraConfigFiles.isEmpty()) {
return setup
}
Copy copyConfig = project.tasks.create(name: name, type: Copy, dependsOn: setup)
copyConfig.into(new File(node.homeDir, 'config')) // copy must always have a general dest dir, even though we don't use it
for (Map.Entry<String,Object> extraConfigFile : node.config.extraConfigFiles.entrySet()) {
copyConfig.doFirst {
// make sure the copy won't be a no-op or act on a directory
File srcConfigFile = project.file(extraConfigFile.getValue())
if (srcConfigFile.isDirectory()) {
throw new GradleException("Source for extraConfigFile must be a file: ${srcConfigFile}")
}
if (srcConfigFile.exists() == false) {
throw new GradleException("Source file for extraConfigFile does not exist: ${srcConfigFile}")
}
}
File destConfigFile = new File(node.homeDir, 'config/' + extraConfigFile.getKey())
copyConfig.into(destConfigFile.canonicalFile.parentFile)
.from({ extraConfigFile.getValue() }) // wrap in closure to delay resolution to execution time
.rename { destConfigFile.name }
}
return copyConfig
}
/**
* Adds a task to copy plugins to a temp dir, which they will later be installed from.
*
* For each plugin, if the plugin has rest spec apis in its tests, those api files are also copied
* to the test resources for this project.
*/
static Task configureCopyPluginsTask(String name, Project project, Task setup, NodeInfo node) {
if (node.config.plugins.isEmpty()) {
return setup
}
Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup)
List<FileCollection> pluginFiles = []
for (Map.Entry<String, Object> plugin : node.config.plugins.entrySet()) {
FileCollection pluginZip
if (plugin.getValue() instanceof Project) {
Project pluginProject = plugin.getValue()
if (pluginProject.plugins.hasPlugin(PluginBuildPlugin) == false) {
throw new GradleException("Task ${name} cannot project ${pluginProject.path} which is not an esplugin")
}
String configurationName = "_plugin_${pluginProject.path}"
Configuration configuration = project.configurations.findByName(configurationName)
if (configuration == null) {
configuration = project.configurations.create(configurationName)
}
project.dependencies.add(configurationName, pluginProject)
setup.dependsOn(pluginProject.tasks.bundlePlugin)
pluginZip = configuration
// also allow rest tests to use the rest spec from the plugin
Copy copyRestSpec = null
for (File resourceDir : pluginProject.sourceSets.test.resources.srcDirs) {
File restApiDir = new File(resourceDir, 'rest-api-spec/api')
if (restApiDir.exists() == false) continue
if (copyRestSpec == null) {
copyRestSpec = project.tasks.create(name: pluginTaskName('copy', plugin.getKey(), 'PluginRestSpec'), type: Copy)
copyPlugins.dependsOn(copyRestSpec)
copyRestSpec.into(project.sourceSets.test.output.resourcesDir)
}
copyRestSpec.from(resourceDir).include('rest-api-spec/api/**')
}
} else {
pluginZip = plugin.getValue()
}
pluginFiles.add(pluginZip)
}
copyPlugins.into(node.pluginsTmpDir)
copyPlugins.from(pluginFiles)
return copyPlugins
}
static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) {
if (node.config.distribution != 'integ-test-zip') {
throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!")
}
if (module.plugins.hasPlugin(PluginBuildPlugin) == false) {
throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin")
}
Copy installModule = project.tasks.create(name, Copy.class)
installModule.dependsOn(setup)
installModule.into(new File(node.homeDir, "modules/${module.name}"))
installModule.from({ project.zipTree(module.tasks.bundlePlugin.outputs.files.singleFile) })
return installModule
}
static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Object plugin) {
FileCollection pluginZip
if (plugin instanceof Project) {
pluginZip = project.configurations.getByName("_plugin_${plugin.path}")
} else {
pluginZip = plugin
}
// delay reading the file location until execution time by wrapping in a closure within a GString
String file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}"
Object[] args = [new File(node.homeDir, 'bin/plugin'), 'install', file]
return configureExecTask(name, project, setup, node, args)
}
/** Adds a task to execute a command to help setup the cluster */
static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) {
return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) {
workingDir node.cwd
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'cmd'
args '/C', 'call'
} else {
executable 'sh'
}
args execArgs
}
}
/** Adds a task to start an elasticsearch node with the given configuration */
static Task configureStartTask(String name, Project project, Task setup, NodeInfo node) {
// this closure is converted into ant nodes by groovy's AntBuilder
Closure antRunner = { AntBuilder ant ->
ant.exec(executable: node.executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') {
node.env.each { key, value -> env(key: key, value: value) }
node.args.each { arg(value: it) }
}
}
// this closure is the actual code to run elasticsearch
Closure elasticsearchRunner = {
// Due to how ant exec works with the spawn option, we lose all stdout/stderr from the
// process executed. To work around this, when spawning, we wrap the elasticsearch start
// command inside another shell script, which simply internally redirects the output
            // of the real elasticsearch script. This allows ant to keep the streams open with the
            // dummy process, while still making the output available to us if there is an error in
            // the elasticsearch start script.
if (node.config.daemonize) {
node.writeWrapperScript()
}
// we must add debug options inside the closure so the config is read at execution time, as
// gradle task options are not processed until the end of the configuration phase
if (node.config.debug) {
println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
node.env['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
}
node.getCommandString().eachLine { line -> logger.info(line) }
if (logger.isInfoEnabled() || node.config.daemonize == false) {
runAntCommand(project, antRunner, System.out, System.err)
} else {
// buffer the output, we may not need to print it
PrintStream captureStream = new PrintStream(node.buffer, true, "UTF-8")
runAntCommand(project, antRunner, captureStream, captureStream)
}
}
Task start = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
start.doLast(elasticsearchRunner)
return start
}
static Task configureWaitTask(String name, Project project, List<NodeInfo> nodes, List<Task> startTasks) {
Task wait = project.tasks.create(name: name, dependsOn: startTasks)
wait.doLast {
ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name}") {
or {
for (NodeInfo node : nodes) {
resourceexists {
file(file: node.failedMarker.toString())
}
}
and {
for (NodeInfo node : nodes) {
resourceexists {
file(file: node.pidFile.toString())
}
resourceexists {
file(file: node.httpPortsFile.toString())
}
resourceexists {
file(file: node.transportPortsFile.toString())
}
}
}
}
}
boolean anyNodeFailed = false
for (NodeInfo node : nodes) {
anyNodeFailed |= node.failedMarker.exists()
}
if (ant.properties.containsKey("failed${name}".toString()) || anyNodeFailed) {
waitFailed(nodes, logger, 'Failed to start elasticsearch')
}
// go through each node checking the wait condition
for (NodeInfo node : nodes) {
// first bind node info to the closure, then pass to the ant runner so we can get good logging
Closure antRunner = node.config.waitCondition.curry(node)
boolean success
if (logger.isInfoEnabled()) {
success = runAntCommand(project, antRunner, System.out, System.err)
} else {
PrintStream captureStream = new PrintStream(node.buffer, true, "UTF-8")
success = runAntCommand(project, antRunner, captureStream, captureStream)
}
if (success == false) {
waitFailed(nodes, logger, 'Elasticsearch cluster failed to pass wait condition')
}
}
}
return wait
}
static void waitFailed(List<NodeInfo> nodes, Logger logger, String msg) {
for (NodeInfo node : nodes) {
if (logger.isInfoEnabled() == false) {
// We already log the command at info level. No need to do it twice.
node.getCommandString().eachLine { line -> logger.error(line) }
}
logger.error("Node ${node.nodeNum} output:")
logger.error("|-----------------------------------------")
logger.error("| failure marker exists: ${node.failedMarker.exists()}")
logger.error("| pid file exists: ${node.pidFile.exists()}")
logger.error("| http ports file exists: ${node.httpPortsFile.exists()}")
logger.error("| transport ports file exists: ${node.transportPortsFile.exists()}")
// the waitfor failed, so dump any output we got (if info logging this goes directly to stdout)
logger.error("|\n| [ant output]")
node.buffer.toString('UTF-8').eachLine { line -> logger.error("| ${line}") }
// also dump the log file for the startup script (which will include ES logging output to stdout)
if (node.startLog.exists()) {
logger.error("|\n| [log]")
node.startLog.eachLine { line -> logger.error("| ${line}") }
}
logger.error("|-----------------------------------------")
}
throw new GradleException(msg)
}
/** Adds a task to check if the process with the given pidfile is actually elasticsearch */
static Task configureCheckPreviousTask(String name, Project project, Object depends, NodeInfo node) {
return project.tasks.create(name: name, type: Exec, dependsOn: depends) {
onlyIf { node.pidFile.exists() }
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}"
File jps
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
jps = getJpsExecutableByName(project, "jps.exe")
} else {
jps = getJpsExecutableByName(project, "jps")
}
if (!jps.exists()) {
throw new GradleException("jps executable not found; ensure that you're running Gradle with the JDK rather than the JRE")
}
commandLine jps, '-l'
standardOutput = new ByteArrayOutputStream()
doLast {
String out = standardOutput.toString()
if (out.contains("${pid} org.elasticsearch.bootstrap.Elasticsearch") == false) {
logger.error('jps -l')
logger.error(out)
logger.error("pid file: ${pidFile}")
logger.error("pid: ${pid}")
throw new GradleException("jps -l did not report any process with org.elasticsearch.bootstrap.Elasticsearch\n" +
"Did you run gradle clean? Maybe an old pid file is still lying around.")
} else {
logger.info(out)
}
}
}
}
private static File getJpsExecutableByName(Project project, String jpsExecutableName) {
return Paths.get(project.javaHome.toString(), "bin/" + jpsExecutableName).toFile()
}
/** Adds a task to kill an elasticsearch node with the given pidfile */
static Task configureStopTask(String name, Project project, Object depends, NodeInfo node) {
return project.tasks.create(name: name, type: LoggedExec, dependsOn: depends) {
onlyIf { node.pidFile.exists() }
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}"
doFirst {
logger.info("Shutting down external node with pid ${pid}")
}
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'Taskkill'
args '/PID', pid, '/F'
} else {
executable 'kill'
args '-9', pid
}
doLast {
project.delete(node.pidFile)
}
}
}
/** Returns a unique task name for this task and node configuration */
static String taskName(Task parentTask, NodeInfo node, String action) {
if (node.config.numNodes > 1) {
return "${parentTask.name}#node${node.nodeNum}.${action}"
} else {
return "${parentTask.name}#${action}"
}
}
public static String pluginTaskName(String action, String name, String suffix) {
// replace every dash followed by a character with just the uppercase character
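        // e.g. pluginTaskName('install', 'analysis-icu', 'Plugin') returns 'installAnalysisIcuPlugin'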
String camelName = name.replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) }
return action + camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1) + suffix
}
/** Runs an ant command, sending output to the given out and error streams */
static Object runAntCommand(Project project, Closure command, PrintStream outputStream, PrintStream errorStream) {
DefaultLogger listener = new DefaultLogger(
errorPrintStream: errorStream,
outputPrintStream: outputStream,
messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
project.ant.project.addBuildListener(listener)
Object retVal = command(project.ant)
project.ant.project.removeBuildListener(listener)
return retVal
}
}

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.api.Project
import org.gradle.api.artifacts.Dependency
import org.gradle.api.artifacts.ProjectDependency
import org.gradle.api.tasks.Copy
/**
* A plugin to run messy tests, which are generally tests that depend on plugins.
*
* This plugin will add the same test configuration as standalone tests, except
* also add the plugin-metadata and properties files for each plugin project
* dependency.
*/
class MessyTestPlugin extends StandaloneTestPlugin {
@Override
public void apply(Project project) {
super.apply(project)
project.configurations.testCompile.dependencies.all { Dependency dep ->
// this closure is run every time a compile dependency is added
if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) {
project.gradle.projectsEvaluated {
addPluginResources(project, dep.dependencyProject)
}
}
}
}
private static addPluginResources(Project project, Project pluginProject) {
String outputDir = "generated-resources/${pluginProject.name}"
String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
copyPluginMetadata.into(outputDir)
copyPluginMetadata.from(pluginProject.tasks.pluginProperties)
copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata'))
project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
// add each generated dir to the test classpath in IDEs
//project.eclipse.classpath.sourceSets = [project.sourceSets.test]
        project.idea.module.singleEntryLibraries = ['TEST': [project.file(outputDir)]]
}
}

View File

@ -0,0 +1,215 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Project
import org.gradle.api.Task
/**
* A container for the files and configuration associated with a single node in a test cluster.
*/
class NodeInfo {
/** common configuration for all nodes, including this one */
ClusterConfiguration config
/** node number within the cluster, for creating unique names and paths */
int nodeNum
/** name of the cluster this node is part of */
String clusterName
/** root directory all node files and operations happen under */
File baseDir
/** the pid file the node will use */
File pidFile
/** a file written by elasticsearch containing the ports of each bound address for http */
File httpPortsFile
/** a file written by elasticsearch containing the ports of each bound address for transport */
File transportPortsFile
/** elasticsearch home dir */
File homeDir
/** config directory */
File confDir
/** THE config file */
File configFile
/** working directory for the node process */
File cwd
/** file that if it exists, indicates the node failed to start */
File failedMarker
/** stdout/stderr log of the elasticsearch process for this node */
File startLog
/** directory to install plugins from */
File pluginsTmpDir
/** environment variables to start the node with */
Map<String, String> env
/** arguments to start the node with */
List<String> args
/** Executable to run the bin/elasticsearch with, either cmd or sh */
String executable
/** Path to the elasticsearch start script */
File esScript
/** script to run when running in the background */
File wrapperScript
/** buffer for ant output when starting this node */
ByteArrayOutputStream buffer = new ByteArrayOutputStream()
/** Creates a node to run as part of a cluster for the given task */
NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) {
this.config = config
this.nodeNum = nodeNum
clusterName = "${task.path.replace(':', '_').substring(1)}"
baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}")
pidFile = new File(baseDir, 'es.pid')
homeDir = homeDir(baseDir, config.distribution)
confDir = confDir(baseDir, config.distribution)
configFile = new File(confDir, 'elasticsearch.yml')
        // even for rpm/deb, the logs are under home because we don't start them as real services
File logsDir = new File(homeDir, 'logs')
httpPortsFile = new File(logsDir, 'http.ports')
transportPortsFile = new File(logsDir, 'transport.ports')
cwd = new File(baseDir, "cwd")
failedMarker = new File(cwd, 'run.failed')
startLog = new File(cwd, 'run.log')
pluginsTmpDir = new File(baseDir, "plugins tmp")
args = []
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable = 'cmd'
args.add('/C')
args.add('"') // quote the entire command
wrapperScript = new File(cwd, "run.bat")
esScript = new File(homeDir, 'bin/elasticsearch.bat')
} else {
executable = 'sh'
wrapperScript = new File(cwd, "run")
esScript = new File(homeDir, 'bin/elasticsearch')
}
if (config.daemonize) {
args.add("${wrapperScript}")
} else {
args.add("${esScript}")
}
env = [
'JAVA_HOME' : project.javaHome,
'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
]
args.add("-Des.tests.portsfile=true")
args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" })
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('es.')) {
args.add("-D${property.getKey()}=${property.getValue()}")
}
}
args.add("-Des.path.conf=${confDir}")
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
args.add('"') // end the entire command, quoted
}
}
/** Returns debug string for the command that started this node. */
String getCommandString() {
String esCommandString = "\nNode ${nodeNum} configuration:\n"
esCommandString += "|-----------------------------------------\n"
esCommandString += "| cwd: ${cwd}\n"
esCommandString += "| command: ${executable} ${args.join(' ')}\n"
esCommandString += '| environment:\n'
env.each { k, v -> esCommandString += "| ${k}: ${v}\n" }
if (config.daemonize) {
esCommandString += "|\n| [${wrapperScript.name}]\n"
            wrapperScript.eachLine('UTF-8', { line -> esCommandString += "| ${line}\n"})
}
esCommandString += '|\n| [elasticsearch.yml]\n'
configFile.eachLine('UTF-8', { line -> esCommandString += "| ${line}\n" })
esCommandString += "|-----------------------------------------"
return esCommandString
}
void writeWrapperScript() {
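        // on unix the wrapper script ends up as (single line):
        //   "<es script>" "$@" > run.log 2>&1 ; if [ $? != 0 ]; then touch run.failed; fi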
String argsPasser = '"$@"'
String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi"
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
argsPasser = '%*'
exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
}
wrapperScript.setText("\"${esScript}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
}
/** Returns an address and port suitable for a uri to connect to this node over http */
String httpUri() {
return httpPortsFile.readLines("UTF-8").get(0)
}
/** Returns an address and port suitable for a uri to connect to this node over transport protocol */
String transportUri() {
return transportPortsFile.readLines("UTF-8").get(0)
}
/** Returns the directory elasticsearch home is contained in for the given distribution */
static File homeDir(File baseDir, String distro) {
String path
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
path = "elasticsearch-${VersionProperties.elasticsearch}"
break
case 'rpm':
case 'deb':
path = "${distro}-extracted/usr/share/elasticsearch"
break
default:
throw new InvalidUserDataException("Unknown distribution: ${distro}")
}
return new File(baseDir, path)
}
static File confDir(File baseDir, String distro) {
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
return new File(homeDir(baseDir, distro), 'config')
case 'rpm':
case 'deb':
return new File(baseDir, "${distro}-extracted/etc/elasticsearch")
default:
throw new InvalidUserDataException("Unkown distribution: ${distro}")
}
}
}

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.internal.tasks.options.Option
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.Input
import org.gradle.util.ConfigureUtil
/**
* Runs integration tests, but first starts an ES cluster,
* and passes the ES cluster info as parameters to the tests.
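 *
 * For example, the packaged rest tests can be enabled from a build script (illustrative):
 * <pre>
 * integTest {
 *   includePackaged = true
 * }
 * </pre>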
*/
public class RestIntegTestTask extends RandomizedTestingTask {
ClusterConfiguration clusterConfig = new ClusterConfiguration()
/** Flag indicating whether the rest tests in the rest spec should be run. */
@Input
boolean includePackaged = false
public RestIntegTestTask() {
description = 'Runs rest tests against an elasticsearch cluster.'
group = JavaBasePlugin.VERIFICATION_GROUP
dependsOn(project.testClasses)
classpath = project.sourceSets.test.runtimeClasspath
testClassesDir = project.sourceSets.test.output.classesDir
// start with the common test configuration
configure(BuildPlugin.commonTestConfig(project))
// override/add more for rest tests
parallelism = '1'
include('**/*IT.class')
systemProperty('tests.rest.load_packaged', 'false')
// copy the rest spec/tests into the test resources
RestSpecHack.configureDependencies(project)
project.afterEvaluate {
dependsOn(RestSpecHack.configureTask(project, includePackaged))
}
// this must run after all projects have been configured, so we know any project
        // references can be accessed as fully configured projects
project.gradle.projectsEvaluated {
Object clusterUri = ClusterFormationTasks.setup(project, this, clusterConfig)
systemProperty('tests.cluster', clusterUri)
}
}
@Option(
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
public void setDebug(boolean enabled) {
clusterConfig.debug = enabled;
}
@Input
public void cluster(Closure closure) {
ConfigureUtil.configure(closure, clusterConfig)
}
public ClusterConfiguration getCluster() {
return clusterConfig
}
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
/**
* The rest-api-spec tests are loaded from the classpath. However, they
* currently must be available on the local filesystem. This class encapsulates
* setting up tasks to copy the rest spec api to test resources.
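 *
 * Typical use, mirroring what RestIntegTestTask does (illustrative):
 * <pre>
 * RestSpecHack.configureDependencies(project)
 * Task copyRestSpec = RestSpecHack.configureTask(project, true)
 * someTestTask.dependsOn(copyRestSpec)
 * </pre>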
*/
public class RestSpecHack {
/**
* Sets dependencies needed to copy the rest spec.
* @param project The project to add rest spec dependency to
*/
public static void configureDependencies(Project project) {
project.configurations {
restSpec
}
project.dependencies {
restSpec "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}"
}
}
/**
* Creates a task to copy the rest spec files.
*
* @param project The project to add the copy task to
* @param includePackagedTests true if the packaged tests should be copied, false otherwise
*/
public static Task configureTask(Project project, boolean includePackagedTests) {
Map copyRestSpecProps = [
name : 'copyRestSpec',
type : Copy,
dependsOn: [project.configurations.restSpec, 'processTestResources']
]
Task copyRestSpec = project.tasks.create(copyRestSpecProps) {
from { project.zipTree(project.configurations.restSpec.singleFile) }
include 'rest-api-spec/api/**'
if (includePackagedTests) {
include 'rest-api-spec/test/**'
}
into project.sourceSets.test.output.resourcesDir
}
project.idea {
module {
if (scopes.TEST != null) {
scopes.TEST.plus.add(project.configurations.restSpec)
}
}
}
return copyRestSpec
}
}
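
The intended call pattern is the one RestIntegTestTask uses above; condensed into a sketch (someRestTestTask is a hypothetical task):

    RestSpecHack.configureDependencies(project)
    project.afterEvaluate {
        someRestTestTask.dependsOn(RestSpecHack.configureTask(project, true))
    }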


@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.gradle.api.Plugin
import org.gradle.api.Project
/** A plugin to add rest integration tests. Used for qa projects. */
public class RestTestPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.pluginManager.apply(StandaloneTestBasePlugin)
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
integTest.mustRunAfter(project.precommit)
project.check.dependsOn(integTest)
}
}
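
Sketch of consuming this plugin from a qa project's build.gradle, assuming the descriptor added later in this commit is named elasticsearch.rest-test.properties (file names are not shown in this view):

    apply plugin: 'elasticsearch.rest-test'

    integTest {
        cluster {
            // further ClusterConfiguration settings for the cluster under test
        }
    }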


@ -0,0 +1,35 @@
package org.elasticsearch.gradle.test
import org.gradle.api.DefaultTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.internal.tasks.options.Option
import org.gradle.util.ConfigureUtil
public class RunTask extends DefaultTask {
ClusterConfiguration clusterConfig = new ClusterConfiguration(httpPort: 9200, transportPort: 9300, daemonize: false)
public RunTask() {
description = "Runs elasticsearch with '${project.path}'"
group = 'Verification'
project.afterEvaluate {
ClusterFormationTasks.setup(project, this, clusterConfig)
}
}
@Option(
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
public void setDebug(boolean enabled) {
clusterConfig.debug = enabled;
}
/** Configure the cluster that will be run. */
@Override
public Task configure(Closure closure) {
ConfigureUtil.configure(closure, clusterConfig)
return this
}
}
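
A usage sketch (task name hypothetical). Note that because configure(Closure) is overridden above, the configuration closure applies to the ClusterConfiguration rather than to the task itself:

    task run(type: RunTask) {
        httpPort = 9201   // overrides the 9200 default passed to clusterConfig
    }

As with RestIntegTestTask, `gradle run --debug-jvm` enables the debug configuration.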


@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.plugins.ide.eclipse.model.EclipseClasspath
/** Configures a build for standalone tests, such as rest integration tests. */
public class StandaloneTestBasePlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.pluginManager.apply(JavaBasePlugin)
project.pluginManager.apply(RandomizedTestingPlugin)
BuildPlugin.globalBuildInfo(project)
BuildPlugin.configureRepositories(project)
// only set up tests to build
project.sourceSets.create('test')
project.dependencies.add('testCompile', "org.elasticsearch:test-framework:${VersionProperties.elasticsearch}")
project.eclipse.classpath.sourceSets = [project.sourceSets.test]
project.eclipse.classpath.plusConfigurations = [project.configurations.testRuntime]
project.idea.module.testSourceDirs += project.sourceSets.test.java.srcDirs
project.idea.module.scopes['TEST'] = [plus: [project.configurations.testRuntime]]
PrecommitTasks.create(project, false)
project.check.dependsOn(project.precommit)
}
}


@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. */
public class StandaloneTestPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.pluginManager.apply(StandaloneTestBasePlugin)
Map testOptions = [
name: 'test',
type: RandomizedTestingTask,
dependsOn: 'testClasses',
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Runs unit tests that are separate'
]
RandomizedTestingTask test = project.tasks.create(testOptions)
test.configure(BuildPlugin.commonTestConfig(project))
test.classpath = project.sourceSets.test.runtimeClasspath
test.testClassesDir = project.sourceSets.test.output.classesDir
test.mustRunAfter(project.precommit)
project.check.dependsOn(test)
}
}
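
Sketch of using it from a qa project, assuming the descriptor added later in this commit is named elasticsearch.standalone-test.properties:

    apply plugin: 'elasticsearch.standalone-test'

`gradle test` then runs the RandomizedTesting task created above, and `gradle check` pulls it in via the dependsOn wiring.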


@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.*
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import org.gradle.process.internal.ExecAction
import org.gradle.process.internal.ExecActionFactory
import javax.inject.Inject
/**
* Runs bats over vagrant. Pretty much like running it using Exec but with a
* nicer output formatter.
*/
class BatsOverVagrantTask extends DefaultTask {
String command
String boxName
ExecAction execAction
BatsOverVagrantTask() {
execAction = getExecActionFactory().newExecAction()
}
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
// never called: Gradle's service injection replaces this method at runtime
throw new UnsupportedOperationException();
}
@Inject
ExecActionFactory getExecActionFactory() {
throw new UnsupportedOperationException();
}
void boxName(String boxName) {
this.boxName = boxName
}
void command(String command) {
this.command = command
}
@TaskAction
void exec() {
// It'd be nice if --machine-readable were, well, nice
execAction.commandLine(['vagrant', 'ssh', boxName, '--command', command])
execAction.setStandardOutput(new TapLoggerOutputStream(
command: command,
factory: getProgressLoggerFactory(),
logger: logger))
execAction.execute();
}
}
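
A usage sketch (task name, box and bats path are hypothetical):

    task bats(type: BatsOverVagrantTask) {
        boxName 'ubuntu-1404'
        command 'sudo bats /elasticsearch/qa/vagrant/*.bats'
    }

which effectively executes `vagrant ssh ubuntu-1404 --command 'sudo bats ...'` with TAP-aware progress logging.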


@ -0,0 +1,107 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant
import com.carrotsearch.gradle.junit4.LoggingOutputStream
import org.gradle.api.GradleScriptException
import org.gradle.api.logging.Logger
import org.gradle.logging.ProgressLogger
import java.util.regex.Matcher
/**
* Adapts an OutputStream containing output from bats into a ProgressLogger
* and a Logger. Each test's output goes to the ProgressLogger while failures
* and non-test output go to the Logger. That means you can always glance
* at the result of the last test and the cumulative pass/fail/skip stats, and
* the failures are all logged.
*
* There is a Tap4j project but we can't use it because it wants to parse the
* entire TAP stream at once and won't parse it stream-wise.
*/
class TapLoggerOutputStream extends LoggingOutputStream {
ProgressLogger progressLogger
Logger logger
int testsCompleted = 0
int testsFailed = 0
int testsSkipped = 0
Integer testCount
String countsFormat
TapLoggerOutputStream(Map args) {
logger = args.logger
progressLogger = args.factory.newOperation(TapLoggerOutputStream)
progressLogger.setDescription("TAP output for $args.command")
progressLogger.started()
progressLogger.progress("Starting $args.command...")
}
void flush() {
if (end == start) return
line(new String(buffer, start, end - start))
start = end
}
void line(String line) {
// System.out.print "===> $line\n"
if (testCount == null) {
try {
testCount = line.split('\\.').last().toInteger()
def length = (testCount as String).length()
countsFormat = "%0${length}d"
countsFormat = "[$countsFormat|$countsFormat|$countsFormat/$countsFormat]"
return
} catch (Exception e) {
throw new GradleScriptException(
'Error parsing first line of TAP stream!!', e)
}
}
Matcher m = line =~ /(?<status>ok|not ok) \d+(?<skip> # skip (?<skipReason>\(.+\))?)? \[(?<suite>.+)\] (?<test>.+)/
if (!m.matches()) {
/* These might be failure report lines or comments or whatever. It's hard
to tell and it doesn't matter. */
logger.warn(line)
return
}
boolean skipped = m.group('skip') != null
boolean success = !skipped && m.group('status') == 'ok'
String skipReason = m.group('skipReason')
String suiteName = m.group('suite')
String testName = m.group('test')
String status
if (skipped) {
status = "SKIPPED"
testsSkipped++
} else if (success) {
status = " OK"
testsCompleted++
} else {
status = " FAILED"
testsFailed++
}
String counts = sprintf(countsFormat,
[testsCompleted, testsFailed, testsSkipped, testCount])
progressLogger.progress("Tests $counts, $status [$suiteName] $testName")
if (!success) {
logger.warn(line)
}
}
}
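
For reference, the TAP dialect the parser above expects looks like this (illustrative input; the bracketed suite names are the convention assumed by the regex in line(), and the arrows are annotations, not part of the stream):

    1..3                                           <- plan line: testCount becomes 3
    ok 1 [packaging] install works                 <- counted as OK
    not ok 2 [packaging] service starts            <- counted as FAILED, also sent to logger.warn
    ok 3 # skip (no systemd) [packaging] systemd   <- counted as SKIPPED

After the second result line the progress display would read roughly: Tests [1|1|0/3], FAILED [packaging] service starts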


@ -0,0 +1,73 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.*
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import org.gradle.process.internal.ExecAction
import org.gradle.process.internal.ExecActionFactory
import javax.inject.Inject
/**
* Runs a vagrant command. Pretty much like Exec task but with a nicer output
* formatter and defaults to `vagrant` as first part of commandLine.
*/
class VagrantCommandTask extends DefaultTask {
List<Object> commandLine
String boxName
ExecAction execAction
VagrantCommandTask() {
execAction = getExecActionFactory().newExecAction()
}
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException();
}
@Inject
ExecActionFactory getExecActionFactory() {
throw new UnsupportedOperationException();
}
void boxName(String boxName) {
this.boxName = boxName
}
void commandLine(Object... commandLine) {
this.commandLine = commandLine
}
@TaskAction
void exec() {
// It'd be nice if --machine-readable were, well, nice
execAction.commandLine(['vagrant'] + commandLine)
execAction.setStandardOutput(new VagrantLoggerOutputStream(
command: commandLine.join(' '),
factory: getProgressLoggerFactory(),
/* Vagrant tends to output a lot of stuff, but most of the important
stuff starts with ==> $box */
squashedPrefix: "==> $boxName: "))
execAction.execute();
}
}
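
A usage sketch (task name and box hypothetical); since the task prepends `vagrant`, this runs `vagrant up ubuntu-1404`:

    task up(type: VagrantCommandTask) {
        boxName 'ubuntu-1404'
        commandLine 'up', 'ubuntu-1404'
    }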


@ -0,0 +1,120 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.vagrant
import com.carrotsearch.gradle.junit4.LoggingOutputStream
import org.gradle.logging.ProgressLogger
/**
* Adapts an OutputStream being written to by vagrant into a ProgressLogger. It
* has three hacks to make the output nice:
*
* 1. Attempt to filter out the "unimportant" output from vagrant. Usually
* vagrant prefixes its more important output with "==> $boxname: ". The stuff
* that isn't prefixed that way can just be thrown out.
*
* 2. It also attempts to detect when vagrant does tricks assuming it's writing
* to a terminal emulator and renders the output more like gradle users expect.
* This means that progress indicators for things like box downloading work and
* box importing look pretty good.
*
* 3. It catches lines that look like "==> $boxName ==> Heading text" and stores
* the text after the second arrow as a "heading" for use in annotating
* provisioning. It does this because provisioning can spit out _lots_ of text
* and it's very easy to lose context when there isn't a scrollback. So we've
* sprinkled `echo "==> Heading text"` into the provisioning scripts for this
* to catch so it can render the output like
* "Heading text > stdout from the provisioner".
*/
class VagrantLoggerOutputStream extends LoggingOutputStream {
static final String HEADING_PREFIX = '==> '
ProgressLogger progressLogger
String squashedPrefix
String lastLine = ''
boolean inProgressReport = false
String heading = ''
VagrantLoggerOutputStream(Map args) {
progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
progressLogger.setDescription("Vagrant $args.command")
progressLogger.started()
progressLogger.progress("Starting vagrant $args.command...")
squashedPrefix = args.squashedPrefix
}
void flush() {
if (end == start) return
line(new String(buffer, start, end - start))
start = end
}
void line(String line) {
// debugPrintLine(line) // Uncomment me to log every incoming line
if (line.startsWith('\r\u001b')) {
/* We don't want to try to be a full terminal emulator but we want to
keep the escape sequences from leaking and catch _some_ of the
meaning. */
line = line.substring(2)
if ('[K' == line) {
inProgressReport = true
}
return
}
if (line.startsWith(squashedPrefix)) {
line = line.substring(squashedPrefix.length())
inProgressReport = false
lastLine = line
if (line.startsWith(HEADING_PREFIX)) {
line = line.substring(HEADING_PREFIX.length())
heading = line + ' > '
} else {
line = heading + line
}
} else if (inProgressReport) {
inProgressReport = false
line = lastLine + line
} else {
return
}
// debugLogLine(line) // Uncomment me to log every line we add to the logger
progressLogger.progress(line)
}
/* Dumps the raw buffer (hex-escaping unprintable bytes); the line argument is unused. */
void debugPrintLine(line) {
System.out.print '----------> '
for (int i = start; i < end; i++) {
switch (buffer[i] as char) {
case ' '..'~':
System.out.print buffer[i] as char
break
default:
System.out.print '%'
System.out.print Integer.toHexString(buffer[i])
}
}
System.out.print '\n'
}
void debugLogLine(line) {
System.out.print '>>>>>>>>>>> '
System.out.print line
System.out.print '\n'
}
}
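
For reference, with a hypothetical squashedPrefix of '==> trusty: ' the stream is rendered like this (arrows annotate the behavior, they are not part of the stream):

    ==> trusty: Importing base box 'ubuntu/trusty64'...  -> progress: Importing base box 'ubuntu/trusty64'...
    plain chatter without the prefix                     -> dropped
    ==> trusty: ==> Installing elasticsearch             -> progress: Installing elasticsearch (and sets the heading)
    ==> trusty: copying plugins                          -> progress: Installing elasticsearch > copying plugins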


@ -0,0 +1 @@
implementation-class=com.carrotsearch.gradle.junit4.RandomizedTestingPlugin


@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.BuildPlugin


@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.plugin.PluginBuildPlugin


@ -0,0 +1,20 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
implementation-class=org.elasticsearch.gradle.test.MessyTestPlugin


@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.test.RestTestPlugin


@ -0,0 +1,20 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
implementation-class=org.elasticsearch.gradle.test.StandaloneTestPlugin


@ -0,0 +1,4 @@
This directory contains templates that work around gradle-ospackage-plugin
trying to be helpful by adding templates for your OS packaging scripts. We
have relatively nice scripts already, so we just override the templates to be
mostly no-ops.


@ -0,0 +1,3 @@
<% files.each {file -> %><%= file
%>
<% } %>


@ -0,0 +1,2 @@
#!/bin/sh -e
<% commands.each {command -> %><%= command %><% } %>


@ -0,0 +1,2 @@
#!/bin/sh -e
<% commands.each {command -> %><%= command %><% } %>


@ -1,6 +1,6 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8
encoding/<project>=UTF-8
encoding/rest-api-spec=UTF-8
encoding/<project>=UTF-8


@ -0,0 +1,22 @@
eclipse.preferences.version=1
# previous configuration from maven build
# this is merged with gradle's generated properties during 'gradle eclipse'
# NOTE: null pointer analysis etc is not enabled currently, it seems very unstable
# (e.g. crashing eclipse etc)
# org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=enabled
# org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
# org.eclipse.jdt.core.compiler.annotation.nullable=org.elasticsearch.common.Nullable
# org.eclipse.jdt.core.compiler.annotation.nullanalysis=enabled
# org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
# org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=warning
# org.eclipse.jdt.core.compiler.problem.nullReference=warning
# org.eclipse.jdt.core.compiler.problem.nullSpecViolation=warning
# org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
# org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.formatter.lineSplit=140
org.eclipse.jdt.core.formatter.tabulation.char=space
org.eclipse.jdt.core.formatter.tabulation.size=4


@ -0,0 +1,2 @@
version=@version@
luceneVersion=@luceneVersion@


@ -38,9 +38,13 @@ org.apache.lucene.index.DocsEnum
org.apache.lucene.index.DocsAndPositionsEnum
org.apache.lucene.queries.TermFilter
org.apache.lucene.queries.TermsFilter
org.apache.lucene.search.Filter
org.apache.lucene.search.FilteredQuery
org.apache.lucene.search.TermRangeFilter
org.apache.lucene.search.NumericRangeFilter
org.apache.lucene.search.PrefixFilter
org.apache.lucene.search.QueryWrapperFilter
org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter
java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead.
java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead.
@ -88,8 +92,37 @@ java.net.InetAddress#getCanonicalHostName()
java.net.InetSocketAddress#getHostName() @ Use getHostString() instead, which avoids a DNS lookup
@defaultMessage Do not violate java's access system
java.lang.Class#getDeclaredClasses() @ Do not violate java's access system: Use getClasses() instead
java.lang.Class#getDeclaredConstructor(java.lang.Class[]) @ Do not violate java's access system: Use getConstructor() instead
java.lang.Class#getDeclaredConstructors() @ Do not violate java's access system: Use getConstructors() instead
java.lang.Class#getDeclaredField(java.lang.String) @ Do not violate java's access system: Use getField() instead
java.lang.Class#getDeclaredFields() @ Do not violate java's access system: Use getFields() instead
java.lang.Class#getDeclaredMethod(java.lang.String, java.lang.Class[]) @ Do not violate java's access system: Use getMethod() instead
java.lang.Class#getDeclaredMethods() @ Do not violate java's access system: Use getMethods() instead
java.lang.reflect.AccessibleObject#setAccessible(boolean)
java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean)
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
@defaultMessage this method needs special permission
java.lang.Thread#getAllStackTraces()
@defaultMessage Please do not terminate the application
java.lang.System#exit(int)
java.lang.Runtime#exit(int)
java.lang.Runtime#halt(int)
@defaultMessage Treat system properties as immutable
java.lang.System#setProperties(java.util.Properties)
java.lang.System#setProperty(java.lang.String,java.lang.String)
java.lang.System#clearProperty(java.lang.String)
java.lang.System#getProperties() @ Use BootstrapInfo.getSystemProperties for a read-only view
@defaultMessage Avoid unchecked warnings by using Collections#empty(List|Map|Set) methods
java.util.Collections#EMPTY_LIST
java.util.Collections#EMPTY_MAP
java.util.Collections#EMPTY_SET
java.util.Collections#shuffle(java.util.List) @ Use java.util.Collections#shuffle(java.util.List, java.util.Random) with a reproducible source of randomness
java.util.Random#<init>() @ Use org.elasticsearch.common.random.Randomness#create for reproducible sources of randomness


@ -87,3 +87,15 @@ java.util.concurrent.Future#cancel(boolean)
@defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead
org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
org.elasticsearch.common.io.PathUtils#get(java.net.URI)
@defaultMessage Don't use deprecated Query#setBoost, wrap the query into a BoostQuery instead
org.apache.lucene.search.Query#setBoost(float)
@defaultMessage Constructing a DateTime without a time zone is dangerous
org.joda.time.DateTime#<init>()
org.joda.time.DateTime#<init>(long)
org.joda.time.DateTime#<init>(int, int, int, int, int)
org.joda.time.DateTime#<init>(int, int, int, int, int, int)
org.joda.time.DateTime#<init>(int, int, int, int, int, int, int)
org.joda.time.DateTime#now()
org.joda.time.DateTimeZone#getDefault()


@ -21,3 +21,5 @@ com.carrotsearch.randomizedtesting.annotations.Repeat @ Don't commit hardcoded r
org.apache.lucene.codecs.Codec#setDefault(org.apache.lucene.codecs.Codec) @ Use the SuppressCodecs("*") annotation instead
org.apache.lucene.util.LuceneTestCase$Slow @ Don't write slow tests
org.junit.Ignore @ Use AwaitsFix instead
org.junit.Test @defaultMessage Just name your test method testFooBar


@ -24,26 +24,26 @@
# jvm=true
# classname=foo.bar.BazPlugin
# description=My cool plugin
# version=2.0.0-rc1
# version=2.0
# elasticsearch.version=2.0
# java.version=1.7
#
### mandatory elements for all plugins:
#
# 'description': simple summary of the plugin
description=${project.description}
description=${description}
#
# 'version': plugin's version
version=${project.version}
version=${version}
#
# 'name': the plugin name
name=${elasticsearch.plugin.name}
name=${name}
### mandatory elements for site plugins:
#
# 'site': set to true to indicate contents of the _site/
# directory in the root of the plugin should be served.
site=${elasticsearch.plugin.site}
site=${site}
#
### mandatory elements for jvm plugins :
#
@ -52,29 +52,25 @@ site=${elasticsearch.plugin.site}
# Note that only jar files in the root directory are
# added to the classpath for the plugin! If you need
# other resources, package them into a resources jar.
jvm=${elasticsearch.plugin.jvm}
jvm=${jvm}
#
# 'classname': the name of the class to load, fully-qualified.
classname=${elasticsearch.plugin.classname}
classname=${classname}
#
# 'java.version' version of java the code is built against
# use the system property java.specification.version
# version string must be a sequence of nonnegative decimal integers
# separated by "."'s and may have leading zeros
java.version=${maven.compiler.target}
java.version=${javaVersion}
#
# 'elasticsearch.version' version of elasticsearch compiled against
# You will have to release a new version of the plugin for each new
# elasticsearch release. This version is checked when the plugin
# is loaded so Elasticsearch will refuse to start in the presence of
# plugins with the incorrect elasticsearch.version.
elasticsearch.version=${elasticsearch.version}
elasticsearch.version=${elasticsearchVersion}
#
### deprecated elements for jvm plugins :
#
# 'isolated': true if the plugin should have its own classloader.
# passing false is deprecated, and only intended to support plugins
# that have hard dependencies against each other. If this is
# not specified, then the plugin is isolated by default.
isolated=${elasticsearch.plugin.isolated}
#
isolated=${isolated}
#
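
The change above swaps Maven-style placeholders (${project.version}) for plain keys (${version}) that Gradle can expand. A hypothetical sketch of expanding them with Gradle resource filtering; all values here are invented for illustration:

    processResources {
        // expand() runs Groovy's template engine over the ${...} placeholders
        expand(
            name: 'example-plugin',
            description: 'An example plugin',
            version: project.version,
            classname: 'org.example.ExamplePlugin',
            jvm: true,
            site: false,
            isolated: true,
            javaVersion: '1.8',
            elasticsearchVersion: '3.0.0-SNAPSHOT'
        )
    }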


@ -0,0 +1,19 @@
elasticsearch = 3.0.0-SNAPSHOT
lucene = 5.5.0-snapshot-1719088
# optional dependencies
spatial4j = 0.5
jts = 1.13
jackson = 2.6.2
log4j = 1.2.17
slf4j = 1.6.2
jna = 4.1.0
# test dependencies
randomizedrunner = 2.3.2
junit = 4.11
httpclient = 4.3.6
httpcore = 4.3.3
commonslogging = 1.1.3
commonscodec = 1.10
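
These keys back the VersionProperties class used throughout the build above (e.g. ${VersionProperties.elasticsearch}). A minimal sketch of loading them, assuming the file is on the classpath as /version.properties (the real class's loading details are not shown in this view):

    Properties props = new Properties()
    props.load(VersionProperties.class.getResourceAsStream('/version.properties'))
    String esVersion = props.getProperty('elasticsearch')      // 3.0.0-SNAPSHOT
    String luceneVersion = props.getProperty('lucene')         // 5.5.0-snapshot-1719088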


@ -1,559 +0,0 @@
org.apache.lucene.analysis.miscellaneous.TruncateTokenFilterTests=190
org.apache.lucene.analysis.miscellaneous.UniqueTokenFilterTests=187
org.apache.lucene.queries.BlendedTermQueryTests=696
org.apache.lucene.queries.MinDocQueryTests=503
org.apache.lucene.search.postingshighlight.CustomPassageFormatterTests=69
org.apache.lucene.search.postingshighlight.CustomPostingsHighlighterTests=599
org.apache.lucene.search.postingshighlight.CustomSeparatorBreakIteratorTests=99
org.apache.lucene.util.SloppyMathTests=734
org.elasticsearch.ESExceptionTests=701
org.elasticsearch.ExceptionSerializationTests=3740
org.elasticsearch.NamingConventionTests=1061
org.elasticsearch.SpecialPermissionTests=90
org.elasticsearch.VersionTests=179
org.elasticsearch.action.OriginalIndicesTests=66
org.elasticsearch.action.admin.cluster.health.ClusterHealthResponsesTests=120
org.elasticsearch.action.admin.cluster.state.ClusterStateRequestTests=32
org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilderTests=51
org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestTests=60
org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequestTests=2294
org.elasticsearch.action.admin.indices.shards.IndicesShardStoreResponseTests=61
org.elasticsearch.action.admin.indices.stats.IndicesStatsTests=2832
org.elasticsearch.action.admin.indices.template.put.MetaDataIndexTemplateServiceTests=72
org.elasticsearch.action.admin.indices.warmer.put.PutWarmerRequestTests=98
org.elasticsearch.action.bulk.BulkRequestTests=578
org.elasticsearch.action.count.CountRequestBuilderTests=495
org.elasticsearch.action.count.CountRequestTests=21
org.elasticsearch.action.count.CountResponseTests=63
org.elasticsearch.action.fieldstats.FieldStatsRequestTests=45
org.elasticsearch.action.get.MultiGetShardRequestTests=81
org.elasticsearch.action.index.IndexRequestBuilderTests=372
org.elasticsearch.action.index.IndexRequestTests=78
org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequestTests=58
org.elasticsearch.action.percolate.MultiPercolatorRequestTests=144
org.elasticsearch.action.search.MultiSearchRequestTests=57
org.elasticsearch.action.search.SearchRequestBuilderTests=291
org.elasticsearch.action.support.IndicesOptionsTests=83
org.elasticsearch.action.support.ListenableActionFutureTests=55
org.elasticsearch.action.support.TransportActionFilterChainTests=52
org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeActionTests=110
org.elasticsearch.action.support.replication.BroadcastReplicationTests=151
org.elasticsearch.action.support.replication.ShardReplicationTests=236
org.elasticsearch.action.termvectors.TermVectorsUnitTests=293
org.elasticsearch.action.update.UpdateRequestTests=67
org.elasticsearch.bootstrap.BootstrapCliParserTests=73
org.elasticsearch.bootstrap.ESPolicyTests=55
org.elasticsearch.bootstrap.JNANativesTests=77
org.elasticsearch.bootstrap.JarHellTests=171
org.elasticsearch.bootstrap.JavaVersionTests=65
org.elasticsearch.bootstrap.SeccompTests=123
org.elasticsearch.bootstrap.SecurityTests=238
org.elasticsearch.client.node.NodeClientHeadersTests=355
org.elasticsearch.client.transport.TransportClientHeadersTests=640
org.elasticsearch.client.transport.TransportClientNodesServiceTests=3307
org.elasticsearch.cluster.ClusterModuleTests=73
org.elasticsearch.cluster.ClusterStateTests=17
org.elasticsearch.cluster.DiskUsageTests=107
org.elasticsearch.cluster.block.ClusterBlockTests=41
org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests=158
org.elasticsearch.cluster.metadata.HumanReadableIndexSettingsTests=41
org.elasticsearch.cluster.metadata.IndexNameExpressionResolverTests=370
org.elasticsearch.cluster.metadata.MappingMetaDataParserTests=103
org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeServiceTests=26
org.elasticsearch.cluster.metadata.ToAndFromJsonMetaDataTests=122
org.elasticsearch.cluster.metadata.WildcardExpressionResolverTests=80
org.elasticsearch.cluster.node.DiscoveryNodeFiltersTests=62
org.elasticsearch.cluster.routing.AllocationIdTests=79
org.elasticsearch.cluster.routing.RoutingBackwardCompatibilityTests=3477
org.elasticsearch.cluster.routing.RoutingServiceTests=368
org.elasticsearch.cluster.routing.RoutingTableTests=123
org.elasticsearch.cluster.routing.ShardRoutingTests=179
org.elasticsearch.cluster.routing.UnassignedInfoTests=146
org.elasticsearch.cluster.routing.allocation.AddIncrementallyTests=97
org.elasticsearch.cluster.routing.allocation.AllocationCommandsTests=137
org.elasticsearch.cluster.routing.allocation.AllocationPriorityTests=34
org.elasticsearch.cluster.routing.allocation.AwarenessAllocationTests=334
org.elasticsearch.cluster.routing.allocation.BalanceConfigurationTests=426
org.elasticsearch.cluster.routing.allocation.BalanceUnbalancedClusterTests=9557
org.elasticsearch.cluster.routing.allocation.ClusterRebalanceRoutingTests=908
org.elasticsearch.cluster.routing.allocation.ConcurrentRebalanceRoutingTests=157
org.elasticsearch.cluster.routing.allocation.DeadNodesAllocationTests=72
org.elasticsearch.cluster.routing.allocation.ElectReplicaAsPrimaryDuringRelocationTests=50
org.elasticsearch.cluster.routing.allocation.ExpectedShardSizeAllocationTests=127
org.elasticsearch.cluster.routing.allocation.FailedNodeRoutingTests=48
org.elasticsearch.cluster.routing.allocation.FailedShardsRoutingTests=151
org.elasticsearch.cluster.routing.allocation.FilterRoutingTests=53
org.elasticsearch.cluster.routing.allocation.IndexBalanceTests=118
org.elasticsearch.cluster.routing.allocation.NodeVersionAllocationDeciderTests=424
org.elasticsearch.cluster.routing.allocation.PreferLocalPrimariesToRelocatingPrimariesTests=75
org.elasticsearch.cluster.routing.allocation.PreferPrimaryAllocationTests=97
org.elasticsearch.cluster.routing.allocation.PrimaryElectionRoutingTests=337
org.elasticsearch.cluster.routing.allocation.PrimaryNotRelocatedWhileBeingRecoveredTests=2581
org.elasticsearch.cluster.routing.allocation.RandomAllocationDeciderTests=53
org.elasticsearch.cluster.routing.allocation.RebalanceAfterActiveTests=79
org.elasticsearch.cluster.routing.allocation.ReplicaAllocatedAfterPrimaryTests=98
org.elasticsearch.cluster.routing.allocation.RoutingNodesIntegrityTests=47
org.elasticsearch.cluster.routing.allocation.SameShardRoutingTests=99
org.elasticsearch.cluster.routing.allocation.ShardVersioningTests=54
org.elasticsearch.cluster.routing.allocation.ShardsLimitAllocationTests=69
org.elasticsearch.cluster.routing.allocation.SingleShardNoReplicasRoutingTests=209
org.elasticsearch.cluster.routing.allocation.SingleShardOneReplicaRoutingTests=57
org.elasticsearch.cluster.routing.allocation.StartedShardsRoutingTests=83
org.elasticsearch.cluster.routing.allocation.TenShardsOneReplicaRoutingTests=39
org.elasticsearch.cluster.routing.allocation.ThrottlingAllocationTests=14
org.elasticsearch.cluster.routing.allocation.UpdateNumberOfReplicasTests=30
org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDeciderTests=75
org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDeciderUnitTests=107
org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationTests=108
org.elasticsearch.cluster.routing.operation.hash.murmur3.Murmur3HashFunctionTests=71
org.elasticsearch.cluster.serialization.ClusterSerializationTests=25
org.elasticsearch.cluster.serialization.ClusterStateToStringTests=42
org.elasticsearch.cluster.serialization.DiffableTests=95
org.elasticsearch.cluster.settings.SettingsValidatorTests=53
org.elasticsearch.cluster.structure.RoutingIteratorTests=1016
org.elasticsearch.codecs.CodecTests=9816
org.elasticsearch.common.Base64Tests=2127
org.elasticsearch.common.BooleansTests=54
org.elasticsearch.common.ChannelsTests=222
org.elasticsearch.common.ParseFieldTests=92
org.elasticsearch.common.PidFileTests=2205
org.elasticsearch.common.StringsTests=96
org.elasticsearch.common.TableTests=90
org.elasticsearch.common.UUIDTests=1844
org.elasticsearch.common.blobstore.BlobStoreTests=44
org.elasticsearch.common.breaker.MemoryCircuitBreakerTests=187
org.elasticsearch.common.bytes.BytesReferenceTests=42
org.elasticsearch.common.bytes.PagedBytesReferenceTests=890
org.elasticsearch.common.cli.CheckFileCommandTests=462
org.elasticsearch.common.cli.CliToolTests=195
org.elasticsearch.common.cli.TerminalTests=111
org.elasticsearch.common.collect.CopyOnWriteHashMapTests=138
org.elasticsearch.common.compress.deflate.DeflateCompressedStreamTests=3050
org.elasticsearch.common.compress.deflate.DeflateXContentTests=1022
org.elasticsearch.common.compress.lzf.CorruptedCompressorTests=47
org.elasticsearch.common.compress.lzf.LZFCompressedStreamTests=3845
org.elasticsearch.common.compress.lzf.LZFXContentTests=738
org.elasticsearch.common.geo.GeoDistanceTests=183
org.elasticsearch.common.geo.GeoHashTests=603
org.elasticsearch.common.geo.GeoJSONShapeParserTests=271
org.elasticsearch.common.geo.ShapeBuilderTests=649
org.elasticsearch.common.geo.ShapeRelationTests=63
org.elasticsearch.common.geo.SpatialStrategyTests=141
org.elasticsearch.common.hash.MessageDigestsTests=6973
org.elasticsearch.common.hashing.MurmurHash3Tests=55
org.elasticsearch.common.hppc.HppcMapsTests=46
org.elasticsearch.common.io.FileSystemUtilsTests=2105
org.elasticsearch.common.io.StreamsTests=134
org.elasticsearch.common.io.stream.BytesStreamsTests=552
org.elasticsearch.common.joda.DateMathParserTests=127
org.elasticsearch.common.logging.jdk.JDKESLoggerTests=43
org.elasticsearch.common.logging.log4j.Log4jESLoggerTests=135
org.elasticsearch.common.logging.log4j.LoggingConfigurationTests=2414
org.elasticsearch.common.lucene.IndexCacheableQueryTests=124
org.elasticsearch.common.lucene.LuceneTests=1704
org.elasticsearch.common.lucene.ShardCoreKeyMapTests=177
org.elasticsearch.common.lucene.all.SimpleAllTests=2588
org.elasticsearch.common.lucene.index.ESDirectoryReaderTests=323
org.elasticsearch.common.lucene.index.FreqTermsEnumTests=682
org.elasticsearch.common.lucene.search.MultiPhrasePrefixQueryTests=58
org.elasticsearch.common.lucene.search.function.ScriptScoreFunctionTests=54
org.elasticsearch.common.lucene.search.morelikethis.MoreLikeThisQueryTests=69
org.elasticsearch.common.lucene.search.morelikethis.XMoreLikeThisTests=262
org.elasticsearch.common.lucene.store.ByteArrayIndexInputTests=112
org.elasticsearch.common.lucene.store.InputStreamIndexInputTests=89
org.elasticsearch.common.lucene.uid.VersionsTests=500
org.elasticsearch.common.math.MathUtilsTests=133
org.elasticsearch.common.network.NetworkAddressTests=2234
org.elasticsearch.common.network.NetworkServiceTests=103
org.elasticsearch.common.network.NetworkUtilsTests=37
org.elasticsearch.common.path.PathTrieTests=67
org.elasticsearch.common.property.PropertyPlaceholderTests=22
org.elasticsearch.common.recycler.ConcurrentRecyclerTests=57
org.elasticsearch.common.recycler.LockedRecyclerTests=105
org.elasticsearch.common.recycler.NoneRecyclerTests=15
org.elasticsearch.common.recycler.QueueRecyclerTests=76
org.elasticsearch.common.regex.RegexTests=103
org.elasticsearch.common.rounding.RoundingTests=59
org.elasticsearch.common.rounding.TimeZoneRoundingTests=180
org.elasticsearch.common.settings.SettingsFilterTests=9
org.elasticsearch.common.settings.SettingsTests=137
org.elasticsearch.common.settings.loader.JsonSettingsLoaderTests=29
org.elasticsearch.common.settings.loader.PropertiesSettingsLoaderTests=64
org.elasticsearch.common.settings.loader.YamlSettingsLoaderTests=214
org.elasticsearch.common.transport.BoundTransportAddressTests=139
org.elasticsearch.common.unit.ByteSizeUnitTests=77
org.elasticsearch.common.unit.ByteSizeValueTests=106
org.elasticsearch.common.unit.DistanceUnitTests=58
org.elasticsearch.common.unit.FuzzinessTests=55
org.elasticsearch.common.unit.RatioValueTests=49
org.elasticsearch.common.unit.SizeValueTests=88
org.elasticsearch.common.unit.TimeValueTests=120
org.elasticsearch.common.util.ArrayUtilsTests=43
org.elasticsearch.common.util.BigArraysTests=4095
org.elasticsearch.common.util.ByteUtilsTests=103
org.elasticsearch.common.util.BytesRefHashTests=1372
org.elasticsearch.common.util.CancellableThreadsTests=37
org.elasticsearch.common.util.CollectionUtilsTests=219
org.elasticsearch.common.util.LongHashTests=501
org.elasticsearch.common.util.LongObjectHashMapTests=820
org.elasticsearch.common.util.MultiDataPathUpgraderTests=984
org.elasticsearch.common.util.SingleObjectCacheTests=107
org.elasticsearch.common.util.URIPatternTests=128
org.elasticsearch.common.util.concurrent.CountDownTests=266
org.elasticsearch.common.util.concurrent.EsExecutorsTests=351
org.elasticsearch.common.util.concurrent.PrioritizedExecutorsTests=436
org.elasticsearch.common.util.concurrent.RefCountedTests=111
org.elasticsearch.common.util.iterable.IterablesTests=16
org.elasticsearch.common.xcontent.ObjectParserTests=110
org.elasticsearch.common.xcontent.XContentFactoryTests=16
org.elasticsearch.common.xcontent.builder.BuilderRawFieldTests=93
org.elasticsearch.common.xcontent.builder.XContentBuilderTests=90
org.elasticsearch.common.xcontent.cbor.CborXContentParserTests=65
org.elasticsearch.common.xcontent.cbor.JsonVsCborTests=33
org.elasticsearch.common.xcontent.smile.JsonVsSmileTests=38
org.elasticsearch.common.xcontent.support.XContentHelperTests=44
org.elasticsearch.common.xcontent.support.XContentMapValuesTests=102
org.elasticsearch.common.xcontent.support.filtering.CborFilteringGeneratorTests=100
org.elasticsearch.common.xcontent.support.filtering.JsonFilteringGeneratorTests=156
org.elasticsearch.common.xcontent.support.filtering.SmileFilteringGeneratorTests=177
org.elasticsearch.common.xcontent.support.filtering.YamlFilteringGeneratorTests=121
org.elasticsearch.deps.jackson.JacksonLocationTests=20
org.elasticsearch.deps.joda.SimpleJodaTests=223
org.elasticsearch.deps.lucene.SimpleLuceneTests=432
org.elasticsearch.deps.lucene.VectorHighlighterTests=354
org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandlerTests=21
org.elasticsearch.discovery.DiscoveryModuleTests=27
org.elasticsearch.discovery.ZenFaultDetectionTests=270
org.elasticsearch.discovery.zen.ElectMasterServiceTests=50
org.elasticsearch.discovery.zen.NodeJoinControllerTests=251
org.elasticsearch.discovery.zen.ZenDiscoveryUnitTests=65
org.elasticsearch.discovery.zen.ZenPingTests=16
org.elasticsearch.discovery.zen.publish.PendingClusterStatesQueueTests=179
org.elasticsearch.discovery.zen.publish.PublishClusterStateActionTests=887
org.elasticsearch.env.EnvironmentTests=85
org.elasticsearch.env.NodeEnvironmentTests=678
org.elasticsearch.fieldstats.FieldStatsTests=1846
org.elasticsearch.gateway.AsyncShardFetchTests=400
org.elasticsearch.gateway.DanglingIndicesStateTests=43
org.elasticsearch.gateway.GatewayMetaStateTests=202
org.elasticsearch.gateway.GatewayModuleTests=14
org.elasticsearch.gateway.GatewayServiceTests=62
org.elasticsearch.gateway.GatewayTests=66
org.elasticsearch.gateway.MetaDataStateFormatTests=232
org.elasticsearch.gateway.MetaStateServiceTests=255
org.elasticsearch.gateway.PrimaryShardAllocatorTests=85
org.elasticsearch.gateway.PriorityComparatorTests=49
org.elasticsearch.gateway.ReplicaShardAllocatorTests=64
org.elasticsearch.http.netty.NettyHttpChannelTests=100
org.elasticsearch.http.netty.NettyHttpServerPipeliningTests=3173
org.elasticsearch.http.netty.pipelining.HttpPipeliningHandlerTests=335
org.elasticsearch.index.IndexModuleTests=66
org.elasticsearch.index.IndexServiceTests=15
org.elasticsearch.index.VersionTypeTests=26
org.elasticsearch.index.aliases.IndexAliasesServiceTests=318
org.elasticsearch.index.analysis.ASCIIFoldingTokenFilterFactoryTests=765
org.elasticsearch.index.analysis.AnalysisFactoryTests=150
org.elasticsearch.index.analysis.AnalysisModuleTests=371
org.elasticsearch.index.analysis.AnalysisTests=34
org.elasticsearch.index.analysis.AnalyzerBackwardsCompatTests=1446
org.elasticsearch.index.analysis.CJKFilterFactoryTests=39
org.elasticsearch.index.analysis.CharFilterTests=94
org.elasticsearch.index.analysis.CompoundAnalysisTests=171
org.elasticsearch.index.analysis.HunspellTokenFilterFactoryTests=2896
org.elasticsearch.index.analysis.KeepFilterFactoryTests=53
org.elasticsearch.index.analysis.KeepTypesFilterFactoryTests=82
org.elasticsearch.index.analysis.LimitTokenCountFilterFactoryTests=54
org.elasticsearch.index.analysis.NGramTokenizerFactoryTests=63
org.elasticsearch.index.analysis.NumericAnalyzerTests=13
org.elasticsearch.index.analysis.PatternAnalyzerTests=1636
org.elasticsearch.index.analysis.PatternCaptureTokenFilterTests=147
org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactoryTests=48
org.elasticsearch.index.analysis.PreBuiltAnalyzerTests=293
org.elasticsearch.index.analysis.PreBuiltCharFilterFactoryFactoryTests=86
org.elasticsearch.index.analysis.PreBuiltTokenFilterFactoryFactoryTests=128
org.elasticsearch.index.analysis.PreBuiltTokenizerFactoryFactoryTests=26
org.elasticsearch.index.analysis.ShingleTokenFilterFactoryTests=136
org.elasticsearch.index.analysis.SnowballAnalyzerTests=52
org.elasticsearch.index.analysis.StemmerTokenFilterFactoryTests=2163
org.elasticsearch.index.analysis.StopAnalyzerTests=3016
org.elasticsearch.index.analysis.StopTokenFilterTests=375
org.elasticsearch.index.analysis.WordDelimiterTokenFilterFactoryTests=336
org.elasticsearch.index.analysis.commongrams.CommonGramsTokenFilterFactoryTests=359
org.elasticsearch.index.analysis.synonyms.SynonymsAnalysisTests=166
org.elasticsearch.index.cache.IndexCacheModuleTests=33
org.elasticsearch.index.cache.bitset.BitSetFilterCacheTests=657
org.elasticsearch.index.codec.CodecTests=479
org.elasticsearch.index.codec.postingformat.PostingsFormatTests=6900
org.elasticsearch.index.engine.CommitStatsTests=76
org.elasticsearch.index.engine.InternalEngineSettingsTests=323
org.elasticsearch.index.engine.InternalEngineTests=13034
org.elasticsearch.index.engine.ShadowEngineTests=3902
org.elasticsearch.index.fielddata.BinaryDVFieldDataTests=503
org.elasticsearch.index.fielddata.DisabledFieldDataFormatTests=1035
org.elasticsearch.index.fielddata.DoubleFieldDataTests=943
org.elasticsearch.index.fielddata.DuelFieldDataTests=6036
org.elasticsearch.index.fielddata.FieldDataTests=152
org.elasticsearch.index.fielddata.FilterFieldDataTests=650
org.elasticsearch.index.fielddata.FloatFieldDataTests=1246
org.elasticsearch.index.fielddata.IndexFieldDataServiceTests=2723
org.elasticsearch.index.fielddata.LongFieldDataTests=4912
org.elasticsearch.index.fielddata.NoOrdinalsStringFieldDataTests=5655
org.elasticsearch.index.fielddata.PagedBytesStringFieldDataTests=5923
org.elasticsearch.index.fielddata.ParentChildFieldDataTests=1012
org.elasticsearch.index.fielddata.ScriptDocValuesTests=224
org.elasticsearch.index.fielddata.SortedSetDVStringFieldDataTests=5307
org.elasticsearch.index.fielddata.fieldcomparator.ReplaceMissingTests=127
org.elasticsearch.index.fielddata.ordinals.MultiOrdinalsTests=132
org.elasticsearch.index.fielddata.ordinals.SingleOrdinalsTests=436
org.elasticsearch.index.indexing.IndexingSlowLogTests=49
org.elasticsearch.index.mapper.DocumentParserTests=371
org.elasticsearch.index.mapper.DynamicMappingTests=1335
org.elasticsearch.index.mapper.FieldTypeLookupTests=29
org.elasticsearch.index.mapper.MapperServiceTests=230
org.elasticsearch.index.mapper.UidTests=57
org.elasticsearch.index.mapper.all.SimpleAllMapperTests=1376
org.elasticsearch.index.mapper.binary.BinaryMappingTests=3554
org.elasticsearch.index.mapper.boost.CustomBoostMappingTests=243
org.elasticsearch.index.mapper.boost.FieldLevelBoostTests=2704
org.elasticsearch.index.mapper.camelcase.CamelCaseFieldNameTests=358
org.elasticsearch.index.mapper.completion.CompletionFieldMapperTests=429
org.elasticsearch.index.mapper.compound.CompoundTypesTests=332
org.elasticsearch.index.mapper.copyto.CopyToMapperTests=940
org.elasticsearch.index.mapper.core.BinaryFieldTypeTests=95
org.elasticsearch.index.mapper.core.BooleanFieldMapperTests=414
org.elasticsearch.index.mapper.core.BooleanFieldTypeTests=133
org.elasticsearch.index.mapper.core.ByteFieldTypeTests=86
org.elasticsearch.index.mapper.core.CompletionFieldTypeTests=113
org.elasticsearch.index.mapper.core.DateFieldTypeTests=73
org.elasticsearch.index.mapper.core.DoubleFieldTypeTests=54
org.elasticsearch.index.mapper.core.FloatFieldTypeTests=82
org.elasticsearch.index.mapper.core.IntegerFieldTypeTests=56
org.elasticsearch.index.mapper.core.LongFieldTypeTests=66
org.elasticsearch.index.mapper.core.ShortFieldTypeTests=66
org.elasticsearch.index.mapper.core.StringFieldTypeTests=79
org.elasticsearch.index.mapper.core.TokenCountFieldMapperTests=300
org.elasticsearch.index.mapper.date.DateBackwardsCompatibilityTests=1544
org.elasticsearch.index.mapper.date.SimpleDateMappingTests=378
org.elasticsearch.index.mapper.dynamictemplate.genericstore.GenericStoreDynamicTemplateTests=3256
org.elasticsearch.index.mapper.dynamictemplate.pathmatch.PathMatchDynamicTemplateTests=303
org.elasticsearch.index.mapper.dynamictemplate.simple.SimpleDynamicTemplatesTests=259
org.elasticsearch.index.mapper.externalvalues.SimpleExternalMappingTests=539
org.elasticsearch.index.mapper.geo.GeoEncodingTests=113
org.elasticsearch.index.mapper.geo.GeoPointFieldMapperTests=2345
org.elasticsearch.index.mapper.geo.GeoPointFieldTypeTests=113
org.elasticsearch.index.mapper.geo.GeoShapeFieldMapperTests=768
org.elasticsearch.index.mapper.geo.GeoShapeFieldTypeTests=92
org.elasticsearch.index.mapper.geo.GeohashMappingGeoPointTests=308
org.elasticsearch.index.mapper.id.IdMappingTests=712
org.elasticsearch.index.mapper.index.IndexTypeMapperTests=330
org.elasticsearch.index.mapper.internal.AllFieldTypeTests=95
org.elasticsearch.index.mapper.internal.FieldNamesFieldMapperTests=636
org.elasticsearch.index.mapper.internal.FieldNamesFieldTypeTests=119
org.elasticsearch.index.mapper.internal.IdFieldTypeTests=54
org.elasticsearch.index.mapper.internal.IndexFieldTypeTests=49
org.elasticsearch.index.mapper.internal.ParentFieldMapperTests=83
org.elasticsearch.index.mapper.internal.ParentFieldTypeTests=75
org.elasticsearch.index.mapper.internal.RoutingFieldTypeTests=72
org.elasticsearch.index.mapper.internal.SourceFieldTypeTests=129
org.elasticsearch.index.mapper.internal.TimestampFieldTypeTests=61
org.elasticsearch.index.mapper.internal.TypeFieldTypeTests=53
org.elasticsearch.index.mapper.internal.UidFieldTypeTests=30
org.elasticsearch.index.mapper.internal.VersionFieldTypeTests=39
org.elasticsearch.index.mapper.ip.SimpleIpMappingTests=592
org.elasticsearch.index.mapper.lucene.DoubleIndexingDocTests=142
org.elasticsearch.index.mapper.lucene.StoredNumericValuesTests=328
org.elasticsearch.index.mapper.merge.TestMergeMapperTests=1501
org.elasticsearch.index.mapper.multifield.MultiFieldTests=633
org.elasticsearch.index.mapper.multifield.merge.JavaMultiFieldMergeTests=218
org.elasticsearch.index.mapper.nested.NestedMappingTests=749
org.elasticsearch.index.mapper.null_value.NullValueTests=5152
org.elasticsearch.index.mapper.numeric.SimpleNumericTests=884
org.elasticsearch.index.mapper.object.NullValueObjectMappingTests=299
org.elasticsearch.index.mapper.object.SimpleObjectMappingTests=728
org.elasticsearch.index.mapper.parent.ParentMappingTests=294
org.elasticsearch.index.mapper.path.PathMapperTests=515
org.elasticsearch.index.mapper.routing.RoutingTypeMapperTests=497
org.elasticsearch.index.mapper.simple.SimpleMapperTests=258
org.elasticsearch.index.mapper.source.CompressSourceMappingTests=2902
org.elasticsearch.index.mapper.source.DefaultSourceMappingTests=1323
org.elasticsearch.index.mapper.string.SimpleStringMappingTests=177
org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests=702
org.elasticsearch.index.mapper.timestamp.TimestampMappingTests=2991
org.elasticsearch.index.mapper.ttl.TTLMappingTests=1822
org.elasticsearch.index.mapper.typelevels.ParseDocumentTypeLevelsTests=440
org.elasticsearch.index.mapper.typelevels.ParseMappingTypeLevelTests=187
org.elasticsearch.index.mapper.update.UpdateMappingTests=843
org.elasticsearch.index.query.BoolQueryBuilderTests=154
org.elasticsearch.index.query.BoostingQueryBuilderTests=183
org.elasticsearch.index.query.CombineFunctionTests=53
org.elasticsearch.index.query.CommonTermsQueryBuilderTests=95
org.elasticsearch.index.query.CommonTermsQueryParserTests=709
org.elasticsearch.index.query.ConstantScoreQueryBuilderTests=285
org.elasticsearch.index.query.DisMaxQueryBuilderTests=330
org.elasticsearch.index.query.ExistsQueryBuilderTests=139
org.elasticsearch.index.query.FieldMaskingSpanQueryBuilderTests=152
org.elasticsearch.index.query.FuzzyQueryBuilderTests=210
org.elasticsearch.index.query.GeoBoundingBoxQueryBuilderTests=315
org.elasticsearch.index.query.GeoDistanceQueryBuilderTests=192
org.elasticsearch.index.query.GeoDistanceRangeQueryTests=156
org.elasticsearch.index.query.GeoPolygonQueryBuilderTests=445
org.elasticsearch.index.query.GeoShapeQueryBuilderTests=246
org.elasticsearch.index.query.GeohashCellQueryBuilderTests=85
org.elasticsearch.index.query.HasChildQueryBuilderTests=255
org.elasticsearch.index.query.HasChildQueryParserTests=82
org.elasticsearch.index.query.HasParentQueryBuilderTests=336
org.elasticsearch.index.query.IdsQueryBuilderTests=197
org.elasticsearch.index.query.IndicesQueryBuilderTests=279
org.elasticsearch.index.query.MatchAllQueryBuilderTests=188
org.elasticsearch.index.query.MatchNoneQueryBuilderTests=257
org.elasticsearch.index.query.MatchQueryBuilderTests=2712
org.elasticsearch.index.query.MissingQueryBuilderTests=180
org.elasticsearch.index.query.MoreLikeThisQueryBuilderTests=3351
org.elasticsearch.index.query.MultiMatchQueryBuilderTests=59
org.elasticsearch.index.query.NestedQueryBuilderTests=193
org.elasticsearch.index.query.NotQueryBuilderTests=3071
org.elasticsearch.index.query.OperatorTests=90
org.elasticsearch.index.query.PrefixQueryBuilderTests=149
org.elasticsearch.index.query.QueryFilterBuilderTests=100
org.elasticsearch.index.query.QueryStringQueryBuilderTests=490
org.elasticsearch.index.query.RangeQueryBuilderTests=577
org.elasticsearch.index.query.RegexpQueryBuilderTests=235
org.elasticsearch.index.query.ScoreModeTests=52
org.elasticsearch.index.query.ScriptQueryBuilderTests=108
org.elasticsearch.index.query.SimpleQueryStringBuilderTests=158
org.elasticsearch.index.query.SpanContainingQueryBuilderTests=213
org.elasticsearch.index.query.SpanFirstQueryBuilderTests=105
org.elasticsearch.index.query.SpanMultiTermQueryBuilderTests=1847
org.elasticsearch.index.query.SpanNearQueryBuilderTests=91
org.elasticsearch.index.query.SpanNotQueryBuilderTests=589
org.elasticsearch.index.query.SpanOrQueryBuilderTests=2712
org.elasticsearch.index.query.SpanTermQueryBuilderTests=85
org.elasticsearch.index.query.SpanWithinQueryBuilderTests=61
org.elasticsearch.index.query.TemplateQueryBuilderTests=417
org.elasticsearch.index.query.TemplateQueryParserTests=288
org.elasticsearch.index.query.TermQueryBuilderTests=49
org.elasticsearch.index.query.TermsQueryBuilderTests=69
org.elasticsearch.index.query.TypeQueryBuilderTests=258
org.elasticsearch.index.query.WildcardQueryBuilderTests=38
org.elasticsearch.index.query.WrapperQueryBuilderTests=138
org.elasticsearch.index.query.functionscore.FieldValueFactorFunctionModifierTests=69
org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilderTests=227
org.elasticsearch.index.query.functionscore.ScoreFunctionBuilderTests=53
org.elasticsearch.index.query.support.QueryInnerHitsTests=53
org.elasticsearch.index.search.MultiMatchQueryTests=135
org.elasticsearch.index.search.geo.GeoPointParsingTests=90
org.elasticsearch.index.search.geo.GeoUtilsTests=142
org.elasticsearch.index.search.nested.DoubleNestedSortingTests=269
org.elasticsearch.index.search.nested.FloatNestedSortingTests=575
org.elasticsearch.index.search.nested.LongNestedSortingTests=305
org.elasticsearch.index.search.nested.NestedSortingTests=264
org.elasticsearch.index.shard.CommitPointsTests=58
org.elasticsearch.index.shard.IndexShardTests=2664
org.elasticsearch.index.shard.MergePolicySettingsTests=85
org.elasticsearch.index.shard.NewPathForShardTests=87
org.elasticsearch.index.shard.ShardPathTests=123
org.elasticsearch.index.shard.ShardUtilsTests=69
org.elasticsearch.index.shard.VersionFieldUpgraderTests=27
org.elasticsearch.index.similarity.SimilarityTests=1282
org.elasticsearch.index.snapshots.blobstore.FileInfoTests=350
org.elasticsearch.index.snapshots.blobstore.SlicedInputStreamTests=28
org.elasticsearch.index.store.DirectoryUtilsTests=179
org.elasticsearch.index.store.IndexStoreBWCTests=397
org.elasticsearch.index.store.IndexStoreTests=13
org.elasticsearch.index.store.LegacyVerificationTests=44
org.elasticsearch.index.store.StoreTests=433
org.elasticsearch.index.translog.BufferedTranslogTests=4946
org.elasticsearch.index.translog.TranslogTests=4070
org.elasticsearch.index.translog.TranslogVersionTests=42
org.elasticsearch.indices.IndicesLifecycleListenerSingleNodeTests=611
org.elasticsearch.indices.IndicesModuleTests=1493
org.elasticsearch.indices.IndicesServiceTests=5140
org.elasticsearch.indices.cache.query.terms.TermsLookupTests=22
org.elasticsearch.indices.flush.SyncedFlushSingleNodeTests=1243
org.elasticsearch.indices.flush.SyncedFlushUnitTests=64
org.elasticsearch.indices.memory.IndexingMemoryControllerTests=65
org.elasticsearch.indices.memory.breaker.CircuitBreakerUnitTests=101
org.elasticsearch.indices.recovery.RecoverySourceHandlerTests=691
org.elasticsearch.indices.recovery.RecoveryStateTests=153
org.elasticsearch.indices.recovery.RecoveryStatusTests=62
org.elasticsearch.indices.recovery.StartRecoveryRequestTests=84
org.elasticsearch.indices.store.IndicesStoreTests=83
org.elasticsearch.monitor.fs.FsProbeTests=41
org.elasticsearch.monitor.jvm.JvmStatsTests=43
org.elasticsearch.monitor.os.OsProbeTests=45
org.elasticsearch.monitor.process.ProcessProbeTests=42
org.elasticsearch.node.internal.InternalSettingsPreparerTests=140
org.elasticsearch.plugins.PluginInfoTests=372
org.elasticsearch.plugins.PluginManagerCliTests=153
org.elasticsearch.plugins.PluginManagerUnitTests=51
org.elasticsearch.plugins.PluginsServiceTests=68
org.elasticsearch.recovery.RecoveriesCollectionTests=430
org.elasticsearch.recovery.RecoverySettingsTests=569
org.elasticsearch.rest.BytesRestResponseTests=83
org.elasticsearch.rest.HeadersAndContextCopyClientTests=194
org.elasticsearch.rest.RestFilterChainTests=77
org.elasticsearch.rest.RestRequestTests=39
org.elasticsearch.rest.action.support.RestTableTests=88
org.elasticsearch.rest.util.RestUtilsTests=85
org.elasticsearch.script.FileScriptTests=39
org.elasticsearch.script.NativeScriptTests=111
org.elasticsearch.script.ScriptContextRegistryTests=27
org.elasticsearch.script.ScriptContextTests=85
org.elasticsearch.script.ScriptModesTests=115
org.elasticsearch.script.ScriptParameterParserTests=173
org.elasticsearch.script.ScriptServiceTests=421
org.elasticsearch.script.mustache.MustacheScriptEngineTests=115
org.elasticsearch.script.mustache.MustacheTests=65
org.elasticsearch.search.MultiValueModeTests=149
org.elasticsearch.search.SearchModuleTests=89
org.elasticsearch.search.SearchServiceTests=1170
org.elasticsearch.search.aggregations.AggregationCollectorTests=644
org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests=419
org.elasticsearch.search.aggregations.bucket.significant.SignificanceHeuristicTests=120
org.elasticsearch.search.aggregations.metrics.cardinality.HyperLogLogPlusPlusTests=695
org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests=27
org.elasticsearch.search.aggregations.pipeline.moving.avg.MovAvgUnitTests=122
org.elasticsearch.search.aggregations.support.MissingValuesTests=52
org.elasticsearch.search.aggregations.support.PathTests=90
org.elasticsearch.search.aggregations.support.ScriptValuesTests=67
org.elasticsearch.search.builder.SearchSourceBuilderTests=49
org.elasticsearch.search.compress.SearchSourceCompressTests=3136
org.elasticsearch.search.fetch.innerhits.NestedChildrenFilterTests=128
org.elasticsearch.search.internal.InternalSearchHitTests=46
org.elasticsearch.search.query.QueryPhaseTests=185
org.elasticsearch.search.sort.SortParserTests=319
org.elasticsearch.search.stats.SearchStatsUnitTests=50
org.elasticsearch.search.suggest.CompletionTokenStreamTests=160
org.elasticsearch.search.suggest.completion.CompletionPostingsFormatTests=1319
org.elasticsearch.search.suggest.context.GeoLocationContextMappingTests=109
org.elasticsearch.search.suggest.phrase.NoisyChannelSpellCheckerTests=1409
org.elasticsearch.snapshots.SnapshotRequestsTests=30
org.elasticsearch.snapshots.SnapshotUtilsTests=30
org.elasticsearch.test.rest.test.AssertionParsersTests=30
org.elasticsearch.test.rest.test.DoSectionParserTests=198
org.elasticsearch.test.rest.test.FileUtilsTests=60
org.elasticsearch.test.rest.test.JsonPathTests=83
org.elasticsearch.test.rest.test.RestApiParserFailingTests=102
org.elasticsearch.test.rest.test.RestApiParserTests=73
org.elasticsearch.test.rest.test.RestTestParserTests=145
org.elasticsearch.test.rest.test.SetSectionParserTests=116
org.elasticsearch.test.rest.test.SetupSectionParserTests=248
org.elasticsearch.test.rest.test.SkipSectionParserTests=95
org.elasticsearch.test.rest.test.TestSectionParserTests=134
org.elasticsearch.test.test.InternalTestClusterTests=40
org.elasticsearch.test.test.LoggingListenerTests=72
org.elasticsearch.test.test.VersionUtilsTests=28
org.elasticsearch.threadpool.ThreadPoolSerializationTests=56
org.elasticsearch.threadpool.ThreadPoolStatsTests=99
org.elasticsearch.threadpool.UpdateThreadPoolSettingsTests=24
org.elasticsearch.transport.NettySizeHeaderFrameDecoderTests=183
org.elasticsearch.transport.TransportMessageTests=51
org.elasticsearch.transport.local.SimpleLocalTransportTests=1174
org.elasticsearch.transport.netty.KeyedLockTests=414
org.elasticsearch.transport.netty.NettyScheduledPingTests=1662
org.elasticsearch.transport.netty.NettyTransportMultiPortTests=382
org.elasticsearch.transport.netty.NettyTransportTests=137
org.elasticsearch.transport.netty.SimpleNettyTransportTests=5528
org.elasticsearch.tribe.TribeUnitTests=2098
org.elasticsearch.watcher.FileWatcherTests=203
org.elasticsearch.watcher.ResourceWatcherServiceTests=101

129
core/build.gradle Normal file
View File

@ -0,0 +1,129 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.test.RestSpecHack
apply plugin: 'elasticsearch.build'
apply plugin: 'com.bmuschko.nexus'
apply plugin: 'nebula.optional-base'
archivesBaseName = 'elasticsearch'
dependencies {
// lucene
compile "org.apache.lucene:lucene-core:${versions.lucene}"
compile "org.apache.lucene:lucene-analyzers-common:${versions.lucene}"
compile "org.apache.lucene:lucene-backward-codecs:${versions.lucene}"
compile "org.apache.lucene:lucene-grouping:${versions.lucene}"
compile "org.apache.lucene:lucene-highlighter:${versions.lucene}"
compile "org.apache.lucene:lucene-join:${versions.lucene}"
compile "org.apache.lucene:lucene-memory:${versions.lucene}"
compile "org.apache.lucene:lucene-misc:${versions.lucene}"
compile "org.apache.lucene:lucene-queries:${versions.lucene}"
compile "org.apache.lucene:lucene-queryparser:${versions.lucene}"
compile "org.apache.lucene:lucene-sandbox:${versions.lucene}"
compile "org.apache.lucene:lucene-spatial:${versions.lucene}"
compile "org.apache.lucene:lucene-spatial3d:${versions.lucene}"
compile "org.apache.lucene:lucene-suggest:${versions.lucene}"
compile 'org.elasticsearch:securesm:1.0'
// utilities
compile 'commons-cli:commons-cli:1.3.1'
compile 'com.carrotsearch:hppc:0.7.1'
// time handling, remove with java 8 time
compile 'joda-time:joda-time:2.8.2'
// joda 2.0 moved to using volatile fields for datetime
// When updating to a new version, make sure to update our copy of BaseDateTime
compile 'org.joda:joda-convert:1.2'
// json and yaml
compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${versions.jackson}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}"
// network stack
compile 'io.netty:netty:3.10.5.Final'
// percentiles aggregation
compile 'com.tdunning:t-digest:3.0'
// percentile ranks aggregation
compile 'org.hdrhistogram:HdrHistogram:2.1.6'
// lucene spatial
compile "com.spatial4j:spatial4j:${versions.spatial4j}", optional
compile "com.vividsolutions:jts:${versions.jts}", optional
// logging
compile "log4j:log4j:${versions.log4j}", optional
compile "log4j:apache-log4j-extras:${versions.log4j}", optional
compile "org.slf4j:slf4j-api:${versions.slf4j}", optional
compile "net.java.dev.jna:jna:${versions.jna}", optional
if (isEclipse == false || project.path == ":core-tests") {
testCompile("org.elasticsearch:test-framework:${version}") {
// tests use the locally compiled version of core
exclude group: 'org.elasticsearch', module: 'elasticsearch'
}
}
}
if (isEclipse) {
// in eclipse the project is under a fake root, we need to change around the source sets
sourceSets {
if (project.path == ":core") {
main.java.srcDirs = ['java']
main.resources.srcDirs = ['resources']
} else {
test.java.srcDirs = ['java']
test.resources.srcDirs = ['resources']
}
}
}
compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-fallthrough,-overrides,-rawtypes,-serial,-try,-unchecked"
compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-fallthrough,-overrides,-rawtypes,-serial,-try,-unchecked"
forbiddenPatterns {
exclude '**/*.json'
exclude '**/*.jmx'
exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt'
}
// dependency licenses are currently checked in distribution
dependencyLicenses.enabled = false
if (isEclipse == false || project.path == ":core-tests") {
task integTest(type: RandomizedTestingTask,
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Multi-node tests',
dependsOn: test.dependsOn) {
configure(BuildPlugin.commonTestConfig(project))
classpath = project.test.classpath
testClassesDir = project.test.testClassesDir
include '**/*IT.class'
}
check.dependsOn integTest
integTest.mustRunAfter test
}

View File

@ -1,362 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.elasticsearch</groupId>
<artifactId>parent</artifactId>
<version>3.0.0-SNAPSHOT</version>
</parent>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<name>Elasticsearch: Core</name>
<description>Elasticsearch - Open Source, Distributed, RESTful Search Engine</description>
<properties>
<xlint.options>-Xlint:-cast,-deprecation,-fallthrough,-overrides,-rawtypes,-serial,-try,-unchecked</xlint.options>
</properties>
<dependencies>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.carrotsearch.randomizedtesting</groupId>
<artifactId>randomizedtesting-runner</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-test-framework</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.jimfs</groupId>
<artifactId>jimfs</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queries</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-memory</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-highlighter</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-queryparser</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-suggest</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-join</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-spatial</artifactId>
</dependency>
<dependency>
<groupId>com.spatial4j</groupId>
<artifactId>spatial4j</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.vividsolutions</groupId>
<artifactId>jts</artifactId>
<optional>true</optional>
</dependency>
<!-- needed for templating -->
<dependency>
<groupId>com.github.spullara.mustache.java</groupId>
<artifactId>compiler</artifactId>
<optional>true</optional>
</dependency>
<!-- Lucene spatial -->
<dependency>
<groupId>com.carrotsearch</groupId>
<artifactId>hppc</artifactId>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
</dependency>
<dependency>
<groupId>org.joda</groupId>
<artifactId>joda-convert</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-smile</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-yaml</artifactId>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-cbor</artifactId>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
</dependency>
<dependency>
<groupId>com.ning</groupId>
<artifactId>compress-lzf</artifactId>
</dependency>
<dependency>
<groupId>com.tdunning</groupId>
<artifactId>t-digest</artifactId>
</dependency>
<dependency>
<groupId>org.hdrhistogram</groupId>
<artifactId>HdrHistogram</artifactId>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>apache-log4j-extras</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>net.java.dev.jna</groupId>
<artifactId>jna</artifactId>
<optional>true</optional>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>${project.basedir}/src/main/resources</directory>
<includes>
<include>es-build.properties</include>
</includes>
<filtering>true</filtering>
</resource>
<resource>
<directory>${project.basedir}/src/main/resources</directory>
<includes>
<include>**/*.*</include>
</includes>
</resource>
</resources>
<testResources>
<testResource>
<directory>${project.basedir}/src/test/resources</directory>
<includes>
<include>**/*.*</include>
</includes>
</testResource>
<testResource>
<directory>${elasticsearch.tools.directory}/rest-api-spec</directory>
<targetPath>rest-api-spec</targetPath>
<includes>
<include>api/*.json</include>
<include>test/**/*.yaml</include>
</includes>
</testResource>
<!-- shared test resources like log4j.properties -->
<testResource>
<directory>${elasticsearch.tools.directory}/shared-test-resources</directory>
<filtering>false</filtering>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-test-sources</id>
<goals>
<goal>test-jar</goal>
</goals>
<configuration>
<includes>
<include>org/elasticsearch/test/**/*</include>
<include>org/elasticsearch/bootstrap/BootstrapForTesting.class</include>
<include>org/elasticsearch/bootstrap/MockPluginPolicy.class</include>
<include>org/elasticsearch/common/cli/CliToolTestCase.class</include>
<include>org/elasticsearch/common/cli/CliToolTestCase$*.class</include>
</includes>
<excludes>
<!-- unit tests for yaml suite parser & rest spec parser need to be excluded -->
<exclude>org/elasticsearch/test/rest/test/**/*</exclude>
<!-- unit tests for test framework classes-->
<exclude>org/elasticsearch/test/test/**/*</exclude>
</excludes>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>test-jar</goal>
</goals>
<configuration>
<includes>
<include>rest-api-spec/**/*</include>
<include>org/elasticsearch/test/**/*</include>
<include>org/elasticsearch/bootstrap/BootstrapForTesting.class</include>
<include>org/elasticsearch/bootstrap/MockPluginPolicy.class</include>
<include>org/elasticsearch/common/cli/CliToolTestCase.class</include>
<include>org/elasticsearch/common/cli/CliToolTestCase$*.class</include>
<include>org/elasticsearch/cluster/MockInternalClusterInfoService.class</include>
<include>org/elasticsearch/cluster/MockInternalClusterInfoService$*.class</include>
<include>org/elasticsearch/index/MockEngineFactoryPlugin.class</include>
<include>org/elasticsearch/search/MockSearchService.class</include>
<include>org/elasticsearch/search/MockSearchService$*.class</include>
<include>org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.class</include>
<include>org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams.class</include>
<include>org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptNoParams$*.class</include>
<include>org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams.class</include>
<include>org/elasticsearch/search/aggregations/bucket/script/NativeSignificanceScoreScriptWithParams$*.class</include>
<include>org/elasticsearch/search/aggregations/bucket/script/TestScript.class</include>
<include>org/elasticsearch/search/aggregations/metrics/AbstractNumericTestCase.class</include>
<include>org/elasticsearch/percolator/PercolatorTestUtil.class</include>
<include>org/elasticsearch/cache/recycler/MockPageCacheRecycler.class</include>
<include>org/elasticsearch/cache/recycler/MockPageCacheRecycler$*.class</include>
<include>org/elasticsearch/common/util/MockBigArrays.class</include>
<include>org/elasticsearch/common/util/MockBigArrays$*.class</include>
<include>org/elasticsearch/node/NodeMocksPlugin.class</include>
<include>org/elasticsearch/node/MockNode.class</include>
<include>org/elasticsearch/common/io/PathUtilsForTesting.class</include>
</includes>
<excludes>
<!-- unit tests for yaml suite parser & rest spec parser need to be excluded -->
<exclude>org/elasticsearch/test/rest/test/**/*</exclude>
<!-- unit tests for test framework classes-->
<exclude>org/elasticsearch/test/test/**/*</exclude>
</excludes>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<!-- Don't run the license checker in core -->
<id>check-license</id>
<phase>none</phase>
</execution>
</executions>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>org/apache/lucene/**</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<configuration>
<excludes>
<!-- Guice -->
<exclude>src/main/java/org/elasticsearch/common/inject/**</exclude>
<!-- Forks of Lucene classes -->
<exclude>src/main/java/org/apache/lucene/**/X*.java</exclude>
<!-- netty pipelining -->
<exclude>src/main/java/org/elasticsearch/http/netty/pipelining/**</exclude>
<!-- Guava -->
<exclude>src/main/java/org/elasticsearch/common/network/InetAddresses.java</exclude>
<exclude>src/test/java/org/elasticsearch/common/network/InetAddressesTests.java</exclude>
<exclude>src/test/java/org/elasticsearch/common/collect/EvictingQueueTests.java</exclude>
<!-- Joda -->
<exclude>src/main/java/org/joda/time/base/BaseDateTime.java</exclude>
<exclude>src/main/java/org/joda/time/format/StrictISODateTimeFormat.java</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
<profiles>
<!-- license profile, to generate third party license file -->
<profile>
<id>license</id>
<activation>
<property>
<name>license.generation</name>
<value>true</value>
</property>
</activation>
<!-- not including license-maven-plugin is sufficient to expose default license -->
</profile>
</profiles>
</project>

View File

@ -0,0 +1,3 @@
// this is just a shell gradle file for eclipse to have separate projects for core src and tests
apply from: '../../build.gradle'

View File

@ -18,18 +18,9 @@
*/
package org.apache.lucene.queries;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.index.TermState;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.ToStringUtils;
@ -37,6 +28,7 @@ import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
/**
* BlendedTermQuery can be used to unify term statistics across
@ -77,6 +69,10 @@ public abstract class BlendedTermQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query rewritten = super.rewrite(reader);
if (rewritten != this) {
return rewritten;
}
IndexReaderContext context = reader.getContext();
TermContext[] ctx = new TermContext[terms.length];
int[] docFreqs = new int[ctx.length];
@ -87,9 +83,7 @@ public abstract class BlendedTermQuery extends Query {
final int maxDoc = reader.maxDoc();
blend(ctx, maxDoc, reader);
Query query = topLevelQuery(terms, ctx, docFreqs, maxDoc);
query.setBoost(getBoost());
return query;
return topLevelQuery(terms, ctx, docFreqs, maxDoc);
}
protected abstract Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc);
@ -274,20 +268,15 @@ public abstract class BlendedTermQuery extends Query {
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
BlendedTermQuery that = (BlendedTermQuery) o;
if (!Arrays.equals(equalsTerms(), that.equalsTerms())) return false;
return true;
return Arrays.equals(equalsTerms(), that.equalsTerms());
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + Arrays.hashCode(equalsTerms());
return result;
return Objects.hash(super.hashCode(), Arrays.hashCode(equalsTerms()));
}
public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final boolean disableCoord) {
@ -298,16 +287,16 @@ public abstract class BlendedTermQuery extends Query {
return new BlendedTermQuery(terms, boosts) {
@Override
protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.setDisableCoord(disableCoord);
BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
booleanQueryBuilder.setDisableCoord(disableCoord);
for (int i = 0; i < terms.length; i++) {
TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
if (boosts != null) {
termQuery.setBoost(boosts[i]);
Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) {
query = new BoostQuery(query, boosts[i]);
}
query.add(termQuery, BooleanClause.Occur.SHOULD);
booleanQueryBuilder.add(query, BooleanClause.Occur.SHOULD);
}
return query.build();
return booleanQueryBuilder.build();
}
};
}
@ -321,16 +310,16 @@ public abstract class BlendedTermQuery extends Query {
BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder();
lowBuilder.setDisableCoord(disableCoord);
for (int i = 0; i < terms.length; i++) {
TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
if (boosts != null) {
termQuery.setBoost(boosts[i]);
Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) {
query = new BoostQuery(query, boosts[i]);
}
if ((maxTermFrequency >= 1f && docFreqs[i] > maxTermFrequency)
|| (docFreqs[i] > (int) Math.ceil(maxTermFrequency
* (float) maxDoc))) {
highBuilder.add(termQuery, BooleanClause.Occur.SHOULD);
highBuilder.add(query, BooleanClause.Occur.SHOULD);
} else {
lowBuilder.add(termQuery, BooleanClause.Occur.SHOULD);
lowBuilder.add(query, BooleanClause.Occur.SHOULD);
}
}
BooleanQuery high = highBuilder.build();
@ -363,15 +352,15 @@ public abstract class BlendedTermQuery extends Query {
return new BlendedTermQuery(terms, boosts) {
@Override
protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
DisjunctionMaxQuery query = new DisjunctionMaxQuery(tieBreakerMultiplier);
DisjunctionMaxQuery disMaxQuery = new DisjunctionMaxQuery(tieBreakerMultiplier);
for (int i = 0; i < terms.length; i++) {
TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
if (boosts != null) {
termQuery.setBoost(boosts[i]);
Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) {
query = new BoostQuery(query, boosts[i]);
}
query.add(termQuery);
disMaxQuery.add(query);
}
return query;
return disMaxQuery;
}
};
}
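A note on the pattern above: Query.setBoost() is going away in Lucene, so instead of mutating a query's boost this commit wraps the query in a BoostQuery whenever the boost differs from the default 1f. A minimal sketch of the idiom, assuming the Lucene 5.x classes used in this diff (field and term are illustrative):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class BoostIdiomSketch {
    // Wrap only when the boost is non-default, mirroring the
    // "boosts != null && boosts[i] != 1f" checks above.
    static Query boost(Query query, float boost) {
        return boost != 1f ? new BoostQuery(query, boost) : query;
    }

    public static void main(String[] args) {
        Query q = boost(new TermQuery(new Term("body", "lucene")), 2f);
        System.out.println(q); // prints (body:lucene)^2.0
    }
}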

View File

@ -29,6 +29,7 @@ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import java.io.IOException;
import java.util.Objects;
/** A {@link Query} that only matches documents that are greater than or equal
* to a configured doc ID. */
@ -43,7 +44,7 @@ public final class MinDocQuery extends Query {
@Override
public int hashCode() {
return 31 * super.hashCode() + minDoc;
return Objects.hash(super.hashCode(), minDoc);
}
@Override

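The hashCode() rewrite is the same cleanup applied in BlendedTermQuery above: hand-rolled 31 * x + y combining is replaced by java.util.Objects.hash. Both are valid hash implementations (they merely yield different numbers); a tiny illustration with hypothetical inputs:

import java.util.Objects;

class HashIdiomSketch {
    public static void main(String[] args) {
        int superHash = 42, minDoc = 7;                       // hypothetical values
        System.out.println(31 * superHash + minDoc);          // old, hand-rolled combiner
        System.out.println(Objects.hash(superHash, minDoc));  // new, via Objects.hash
    }
}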
View File

@ -23,13 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.*;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.lucene.search.Queries;
@ -41,12 +35,7 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.*;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
@ -65,7 +54,6 @@ public class MapperQueryParser extends QueryParser {
static {
Map<String, FieldQueryExtension> fieldQueryExtensions = new HashMap<>();
fieldQueryExtensions.put(ExistsFieldQueryExtension.NAME, new ExistsFieldQueryExtension());
fieldQueryExtensions.put(MissingFieldQueryExtension.NAME, new MissingFieldQueryExtension());
FIELD_QUERY_EXTENSIONS = unmodifiableMap(fieldQueryExtensions);
}
@ -148,8 +136,7 @@ public class MapperQueryParser extends QueryParser {
Query q = getFieldQuerySingle(mField, queryText, quoted);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -161,8 +148,7 @@ public class MapperQueryParser extends QueryParser {
for (String mField : fields) {
Query q = getFieldQuerySingle(mField, queryText, quoted);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
@ -250,9 +236,8 @@ public class MapperQueryParser extends QueryParser {
Query q = super.getFieldQuery(mField, queryText, slop);
if (q != null) {
added = true;
applyBoost(mField, q);
q = applySlop(q, slop);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -264,9 +249,8 @@ public class MapperQueryParser extends QueryParser {
for (String mField : fields) {
Query q = super.getFieldQuery(mField, queryText, slop);
if (q != null) {
applyBoost(mField, q);
q = applySlop(q, slop);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
@ -305,8 +289,7 @@ public class MapperQueryParser extends QueryParser {
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -318,8 +301,7 @@ public class MapperQueryParser extends QueryParser {
for (String mField : fields) {
Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
@ -371,8 +353,7 @@ public class MapperQueryParser extends QueryParser {
Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -383,8 +364,9 @@ public class MapperQueryParser extends QueryParser {
List<BooleanClause> clauses = new ArrayList<>();
for (String mField : fields) {
Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity);
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
if (q != null) {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
return getBooleanQuery(clauses, true);
}
@ -434,8 +416,7 @@ public class MapperQueryParser extends QueryParser {
Query q = getPrefixQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -447,8 +428,7 @@ public class MapperQueryParser extends QueryParser {
for (String mField : fields) {
Query q = getPrefixQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
@ -566,8 +546,7 @@ public class MapperQueryParser extends QueryParser {
Query q = getWildcardQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -579,8 +558,7 @@ public class MapperQueryParser extends QueryParser {
for (String mField : fields) {
Query q = getWildcardQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
@ -697,8 +675,7 @@ public class MapperQueryParser extends QueryParser {
Query q = getRegexpQuerySingle(mField, termStr);
if (q != null) {
added = true;
applyBoost(mField, q);
disMaxQuery.add(q);
disMaxQuery.add(applyBoost(mField, q));
}
}
if (!added) {
@ -710,8 +687,7 @@ public class MapperQueryParser extends QueryParser {
for (String mField : fields) {
Query q = getRegexpQuerySingle(mField, termStr);
if (q != null) {
applyBoost(mField, q);
clauses.add(new BooleanClause(q, BooleanClause.Occur.SHOULD));
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
@ -761,11 +737,12 @@ public class MapperQueryParser extends QueryParser {
return fixNegativeQueryIfNeeded(q);
}
private void applyBoost(String field, Query q) {
private Query applyBoost(String field, Query q) {
Float fieldBoost = settings.fieldsAndWeights().get(field);
if (fieldBoost != null) {
q.setBoost(fieldBoost);
if (fieldBoost != null && fieldBoost != 1f) {
return new BoostQuery(q, fieldBoost);
}
return q;
}
private Query applySlop(Query q, int slop) {
@ -779,7 +756,9 @@ public class MapperQueryParser extends QueryParser {
builder.add(terms[i], positions[i]);
}
pq = builder.build();
pq.setBoost(q.getBoost());
//make sure that the boost hasn't been set beforehand, otherwise we'd lose it
assert q.getBoost() == 1f;
assert q instanceof BoostQuery == false;
return pq;
} else if (q instanceof MultiPhraseQuery) {
((MultiPhraseQuery) q).setSlop(slop);

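In the multi-field branches above, the parser builds one sub-query per field and combines them either into a DisjunctionMaxQuery (the useDisMax path) or into a list of SHOULD clauses, with applyBoost now returning a possibly-wrapped query instead of mutating it. A condensed sketch of the dis-max path, assuming the Lucene 5.x DisjunctionMaxQuery(float) constructor seen in this commit (the fieldBoosts map and field names are illustrative):

import java.util.Map;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class DisMaxSketch {
    static Query multiFieldQuery(String text, Map<String, Float> fieldBoosts, float tieBreaker) {
        DisjunctionMaxQuery disMax = new DisjunctionMaxQuery(tieBreaker);
        for (Map.Entry<String, Float> field : fieldBoosts.entrySet()) {
            Query q = new TermQuery(new Term(field.getKey(), text));
            if (field.getValue() != 1f) {
                q = new BoostQuery(q, field.getValue()); // per-field boost, as in applyBoost
            }
            disMax.add(q);
        }
        return disMax;
    }
}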
View File

@ -1,258 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.search.suggest.analyzing;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStreamToAutomaton;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.automaton.*;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.PairOutputs;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
/**
* Implements a fuzzy {@link AnalyzingSuggester}. The similarity measurement is
* based on the Damerau-Levenshtein (optimal string alignment) algorithm, though
* you can explicitly choose classic Levenshtein by passing <code>false</code>
* for the <code>transpositions</code> parameter.
* <p>
* At most, this query will match terms up to
* {@value org.apache.lucene.util.automaton.LevenshteinAutomata#MAXIMUM_SUPPORTED_DISTANCE}
* edits. Higher distances are not supported. Note that the
* fuzzy distance is measured in "byte space" on the bytes
* returned by the {@link org.apache.lucene.analysis.TokenStream}'s {@link
* org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute}, usually UTF8. By default
* the analyzed bytes must be at least 3 {@link
* #DEFAULT_MIN_FUZZY_LENGTH} bytes before any edits are
* considered. Furthermore, the first 1 {@link
* #DEFAULT_NON_FUZZY_PREFIX} byte is not allowed to be
edited. We allow up to 1 {@link
#DEFAULT_MAX_EDITS} edit.
If the {@link #unicodeAware} parameter in the constructor is set to true, maxEdits,
minFuzzyLength, transpositions and nonFuzzyPrefix are measured in Unicode code
points (actual letters) instead of bytes.
*
* <p>
* NOTE: This suggester does not boost suggestions that
* required no edits over suggestions that did require
* edits. This is a known limitation.
*
* <p>
* Note: complex query analyzers can have a significant impact on the lookup
* performance. It's recommended to not use analyzers that drop or inject terms
* like synonyms to keep the complexity of the prefix intersection low for good
* lookup performance. At index time, complex analyzers can safely be used.
* </p>
*
* @lucene.experimental
*/
public final class XFuzzySuggester extends XAnalyzingSuggester {
private final int maxEdits;
private final boolean transpositions;
private final int nonFuzzyPrefix;
private final int minFuzzyLength;
private final boolean unicodeAware;
/**
* Measure maxEdits, minFuzzyLength, transpositions and nonFuzzyPrefix
* parameters in Unicode code points (actual letters)
* instead of bytes.
*/
public static final boolean DEFAULT_UNICODE_AWARE = false;
/**
* The default minimum length of the key passed to {@link
* #lookup} before any edits are allowed.
*/
public static final int DEFAULT_MIN_FUZZY_LENGTH = 3;
/**
* The default prefix length where edits are not allowed.
*/
public static final int DEFAULT_NON_FUZZY_PREFIX = 1;
/**
* The default maximum number of edits for fuzzy
* suggestions.
*/
public static final int DEFAULT_MAX_EDITS = 1;
/**
* The default transposition value passed to {@link org.apache.lucene.util.automaton.LevenshteinAutomata}
*/
public static final boolean DEFAULT_TRANSPOSITIONS = true;
/**
* Creates a {@link FuzzySuggester} instance initialized with default values.
*
* @param analyzer the analyzer used for this suggester
*/
public XFuzzySuggester(Analyzer analyzer) {
this(analyzer, analyzer);
}
/**
* Creates a {@link FuzzySuggester} instance with an index &amp; a query analyzer initialized with default values.
*
* @param indexAnalyzer
* Analyzer that will be used for analyzing suggestions while building the index.
* @param queryAnalyzer
* Analyzer that will be used for analyzing query text during lookup
*/
public XFuzzySuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer) {
this(indexAnalyzer, null, queryAnalyzer, EXACT_FIRST | PRESERVE_SEP, 256, -1, DEFAULT_MAX_EDITS, DEFAULT_TRANSPOSITIONS,
DEFAULT_NON_FUZZY_PREFIX, DEFAULT_MIN_FUZZY_LENGTH, DEFAULT_UNICODE_AWARE, null, false, 0, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER);
}
/**
* Creates a {@link FuzzySuggester} instance.
*
* @param indexAnalyzer Analyzer that will be used for
* analyzing suggestions while building the index.
* @param queryAnalyzer Analyzer that will be used for
* analyzing query text during lookup
* @param options see {@link #EXACT_FIRST}, {@link #PRESERVE_SEP}
* @param maxSurfaceFormsPerAnalyzedForm Maximum number of
* surface forms to keep for a single analyzed form.
* When there are too many surface forms we discard the
* lowest weighted ones.
* @param maxGraphExpansions Maximum number of graph paths
* to expand from the analyzed form. Set this to -1 for
* no limit.
* @param maxEdits must be &gt;= 0 and &lt;= {@link org.apache.lucene.util.automaton.LevenshteinAutomata#MAXIMUM_SUPPORTED_DISTANCE} .
* @param transpositions <code>true</code> if transpositions should be treated as a primitive
* edit operation. If this is false, comparisons will implement the classic
* Levenshtein algorithm.
@param nonFuzzyPrefix length of common (non-fuzzy) prefix (see default {@link #DEFAULT_NON_FUZZY_PREFIX})
* @param minFuzzyLength minimum length of lookup key before any edits are allowed (see default {@link #DEFAULT_MIN_FUZZY_LENGTH})
* @param sepLabel separation label
* @param payloadSep payload separator byte
* @param endByte end byte marker byte
*/
public XFuzzySuggester(Analyzer indexAnalyzer, Automaton queryPrefix, Analyzer queryAnalyzer, int options, int maxSurfaceFormsPerAnalyzedForm, int maxGraphExpansions,
int maxEdits, boolean transpositions, int nonFuzzyPrefix, int minFuzzyLength, boolean unicodeAware,
FST<PairOutputs.Pair<Long, BytesRef>> fst, boolean hasPayloads, int maxAnalyzedPathsForOneInput,
int sepLabel, int payloadSep, int endByte, int holeCharacter) {
super(indexAnalyzer, queryPrefix, queryAnalyzer, options, maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, true, fst, hasPayloads, maxAnalyzedPathsForOneInput, sepLabel, payloadSep, endByte, holeCharacter);
if (maxEdits < 0 || maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
throw new IllegalArgumentException("maxEdits must be between 0 and " + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
}
if (nonFuzzyPrefix < 0) {
throw new IllegalArgumentException("nonFuzzyPrefix must not be >= 0 (got " + nonFuzzyPrefix + ")");
}
if (minFuzzyLength < 0) {
throw new IllegalArgumentException("minFuzzyLength must not be >= 0 (got " + minFuzzyLength + ")");
}
this.maxEdits = maxEdits;
this.transpositions = transpositions;
this.nonFuzzyPrefix = nonFuzzyPrefix;
this.minFuzzyLength = minFuzzyLength;
this.unicodeAware = unicodeAware;
}
@Override
protected List<FSTUtil.Path<PairOutputs.Pair<Long,BytesRef>>> getFullPrefixPaths(List<FSTUtil.Path<PairOutputs.Pair<Long,BytesRef>>> prefixPaths,
Automaton lookupAutomaton,
FST<PairOutputs.Pair<Long,BytesRef>> fst)
throws IOException {
// TODO: right now there's no penalty for fuzzy/edits,
// ie a completion whose prefix matched exactly what the
// user typed gets no boost over completions that
// required an edit, which get no boost over completions
// requiring two edits. I suspect a multiplicative
// factor is appropriate (eg, say a fuzzy match must be at
// least 2X better weight than the non-fuzzy match to
// "compete") ... in which case I think the wFST needs
// to be log weights or something ...
Automaton levA = convertAutomaton(toLevenshteinAutomata(lookupAutomaton));
/*
Writer w = new OutputStreamWriter(new FileOutputStream("out.dot"), "UTF-8");
w.write(levA.toDot());
w.close();
System.out.println("Wrote LevA to out.dot");
*/
return FSTUtil.intersectPrefixPaths(levA, fst);
}
@Override
protected Automaton convertAutomaton(Automaton a) {
if (unicodeAware) {
// FLORIAN EDIT: get converted Automaton from superclass
Automaton utf8automaton = new UTF32ToUTF8().convert(super.convertAutomaton(a));
// This automaton should not blow up during determinize:
utf8automaton = Operations.determinize(utf8automaton, Integer.MAX_VALUE);
return utf8automaton;
} else {
return super.convertAutomaton(a);
}
}
@Override
public TokenStreamToAutomaton getTokenStreamToAutomaton() {
final TokenStreamToAutomaton tsta = super.getTokenStreamToAutomaton();
tsta.setUnicodeArcs(unicodeAware);
return tsta;
}
Automaton toLevenshteinAutomata(Automaton automaton) {
List<Automaton> subs = new ArrayList<>();
FiniteStringsIterator finiteStrings = new FiniteStringsIterator(automaton);
for (IntsRef string; (string = finiteStrings.next()) != null;) {
if (string.length <= nonFuzzyPrefix || string.length < minFuzzyLength) {
subs.add(Automata.makeString(string.ints, string.offset, string.length));
} else {
int ints[] = new int[string.length-nonFuzzyPrefix];
System.arraycopy(string.ints, string.offset+nonFuzzyPrefix, ints, 0, ints.length);
// TODO: maybe add alphaMin to LevenshteinAutomata,
// and pass 1 instead of 0? We probably don't want
// to allow the trailing dedup bytes to be
// edited... but then 0 byte is "in general" allowed
// on input (but not in UTF8).
LevenshteinAutomata lev = new LevenshteinAutomata(ints, unicodeAware ? Character.MAX_CODE_POINT : 255, transpositions);
subs.add(lev.toAutomaton(maxEdits, UnicodeUtil.newString(string.ints, string.offset, nonFuzzyPrefix)));
}
}
if (subs.isEmpty()) {
// automaton is empty, there are no accepted paths through it
return Automata.makeEmpty(); // matches nothing
} else if (subs.size() == 1) {
// no synonyms or anything: just a single path through the tokenstream
return subs.get(0);
} else {
// multiple paths: this is really scary! is it slow?
// maybe we should not do this and throw UOE?
Automaton a = Operations.union(subs);
// TODO: we could call toLevenshteinAutomata() before det?
// this only happens if you have multiple paths anyway (e.g. synonyms)
return Operations.determinize(a, DEFAULT_MAX_DETERMINIZED_STATES);
}
}
}
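For reference, the deleted suggester gates edits on two parameters: toLevenshteinAutomata() leaves a path completely un-fuzzed when the analyzed key is no longer than nonFuzzyPrefix or shorter than minFuzzyLength, and otherwise allows edits only past the first nonFuzzyPrefix positions. A hypothetical predicate (not part of the class) capturing that rule with the defaults above:

class FuzzyGateSketch {
    static final int MIN_FUZZY_LENGTH = 3; // DEFAULT_MIN_FUZZY_LENGTH
    static final int NON_FUZZY_PREFIX = 1; // DEFAULT_NON_FUZZY_PREFIX

    // Hypothetical helper: may an edit occur at this position of the key?
    static boolean editAllowed(int position, int keyLength) {
        if (keyLength <= NON_FUZZY_PREFIX || keyLength < MIN_FUZZY_LENGTH) {
            return false; // key too short: matched exactly, no edits at all
        }
        return position >= NON_FUZZY_PREFIX; // the prefix itself must match exactly
    }

    public static void main(String[] args) {
        System.out.println(editAllowed(0, 6)); // false: inside the exact prefix
        System.out.println(editAllowed(2, 6)); // true
        System.out.println(editAllowed(1, 2)); // false: key shorter than minFuzzyLength
    }
}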

View File

@ -22,14 +22,7 @@ package org.apache.lucene.search.vectorhighlight;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.BlendedTermQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
@ -65,9 +58,6 @@ public class CustomFieldQuery extends FieldQuery {
flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries, boost);
} else if (sourceQuery instanceof FunctionScoreQuery) {
flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost);
} else if (sourceQuery instanceof FilteredQuery) {
flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries, boost);
flatten(((FilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
} else if (sourceQuery instanceof MultiPhrasePrefixQuery) {
flatten(sourceQuery.rewrite(reader), reader, flatQueries, boost);
} else if (sourceQuery instanceof FiltersFunctionScoreQuery) {
@ -109,8 +99,7 @@ public class CustomFieldQuery extends FieldQuery {
for (int i = 0; i < termsIdx.length; i++) {
queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]);
}
PhraseQuery query = queryBuilder.build();
query.setBoost(orig.getBoost());
Query query = queryBuilder.build();
this.flatten(query, reader, flatQueries, orig.getBoost());
} else {
Term[] t = terms.get(currentPos);
@ -120,14 +109,4 @@ public class CustomFieldQuery extends FieldQuery {
}
}
}
void flatten(Filter sourceFilter, IndexReader reader, Collection<Query> flatQueries) throws IOException {
Boolean highlight = highlightFilters.get();
if (highlight == null || highlight.equals(Boolean.FALSE)) {
return;
}
if (sourceFilter instanceof QueryWrapperFilter) {
flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries, 1.0F);
}
}
}
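The flatten() change above also reflects that PhraseQuery is now immutable and assembled through its Builder, with explicit positions; that is what lets the highlighter preserve position gaps from the original query without carrying a mutable boost around. A minimal sketch, assuming the Lucene 5.3-style builder used in the diff (field and terms are illustrative):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.PhraseQuery;

class PhraseBuilderSketch {
    public static void main(String[] args) {
        PhraseQuery.Builder builder = new PhraseQuery.Builder();
        builder.add(new Term("body", "quick"), 0);
        builder.add(new Term("body", "fox"), 2); // gap at position 1, e.g. a removed stopword
        PhraseQuery pq = builder.build();
        System.out.println(pq); // body:"quick ? fox"
    }
}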

View File

@ -36,7 +36,7 @@ public class StoreRateLimiting {
void onPause(long nanos);
}
public static enum Type {
public enum Type {
NONE,
MERGE,
ALL;

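The enum change above is purely cosmetic: a nested enum is implicitly static, so the explicit modifier was redundant. A tiny illustration:

class Outer {
    enum Type { NONE, MERGE, ALL } // implicitly static; "static enum" adds nothing
}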
View File

@ -1,279 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.util;
import java.util.ArrayList;
import java.util.Collection;
/**
* Utilities for converting to/from the GeoHash standard
*
* The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
* representing the level (1-12) [xyxy...xyxyllll]
*
* This differs from a morton encoded value which interleaves lat/lon (y/x).
*
* @lucene.experimental
*/
public class XGeoHashUtils {
public static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
'7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
public static final String BASE_32_STRING = new String(BASE_32);
public static final int PRECISION = 12;
private static final short MORTON_OFFSET = (XGeoUtils.BITS<<1) - (PRECISION*5);
/**
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(final double lon, final double lat, final int level) {
// shift to appropriate level
final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
return ((BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat)) >>> msf) << 4) | level;
}
/**
* Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(final String hash) {
int level = hash.length()-1;
long b;
long l = 0L;
for(char c : hash.toCharArray()) {
b = (long)(BASE_32_STRING.indexOf(c));
l |= (b<<(level--*5));
}
return (l<<4)|hash.length();
}
/**
* Encode an existing geohash long to the provided precision
*/
public static long longEncode(long geohash, int level) {
final short precision = (short)(geohash & 15);
if (precision == level) {
return geohash;
} else if (precision > level) {
return ((geohash >>> (((precision - level) * 5) + 4)) << 4) | level;
}
return ((geohash >>> 4) << (((level - precision) * 5) + 4) | level);
}
/**
* Encode to a geohash string from the geohash based long format
*/
public static final String stringEncode(long geoHashLong) {
int level = (int)geoHashLong&15;
geoHashLong >>>= 4;
char[] chars = new char[level];
do {
chars[--level] = BASE_32[(int)(geoHashLong&31L)];
geoHashLong>>>=5;
} while(level > 0);
return new String(chars);
}
/**
* Encode to a geohash string from full resolution longitude, latitude
*/
public static final String stringEncode(final double lon, final double lat) {
return stringEncode(lon, lat, 12);
}
/**
* Encode to a level specific geohash string from full resolution longitude, latitude
*/
public static final String stringEncode(final double lon, final double lat, final int level) {
// bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
final long hashedVal = BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat));
StringBuilder geoHash = new StringBuilder();
short precision = 0;
final short msf = (XGeoUtils.BITS<<1)-5;
long mask = 31L<<msf;
do {
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
// next 5 bits
mask >>>= 5;
} while (++precision < level);
return geoHash.toString();
}
/**
* Encode to a full precision geohash string from a given morton encoded long value
*/
public static final String stringEncodeFromMortonLong(final long hashedVal) throws Exception {
return stringEncode(hashedVal, PRECISION);
}
/**
* Encode to a geohash string at a given level from a morton long
*/
public static final String stringEncodeFromMortonLong(long hashedVal, final int level) {
// bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
hashedVal = BitUtil.flipFlop(hashedVal);
StringBuilder geoHash = new StringBuilder();
short precision = 0;
final short msf = (XGeoUtils.BITS<<1)-5;
long mask = 31L<<msf;
do {
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
// next 5 bits
mask >>>= 5;
} while (++precision < level);
return geoHash.toString();
}
/**
* Encode to a morton long value from a given geohash string
*/
public static final long mortonEncode(final String hash) {
int level = 11;
long b;
long l = 0L;
for(char c : hash.toCharArray()) {
b = (long)(BASE_32_STRING.indexOf(c));
l |= (b<<((level--*5) + MORTON_OFFSET));
}
return BitUtil.flipFlop(l);
}
/**
* Encode to a morton long value from a given geohash long value
*/
public static final long mortonEncode(final long geoHashLong) {
final int level = (int)(geoHashLong&15);
final short odd = (short)(level & 1);
return BitUtil.flipFlop((geoHashLong >>> 4) << odd) << (((12 - level) * 5) + (MORTON_OFFSET - odd));
}
private static final char encode(int x, int y) {
return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
}
/**
* Calculate all neighbors of a given geohash cell.
*
* @param geohash Geohash of the defined cell
* @return geohashes of all neighbor cells
*/
public static Collection<? extends CharSequence> neighbors(String geohash) {
return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
}
/**
* Calculate the geohash of a neighbor of a geohash
*
* @param geohash the geohash of a cell
* @param level level of the geohash
* @param dx delta of the first grid coordinate (must be -1, 0 or +1)
* @param dy delta of the second grid coordinate (must be -1, 0 or +1)
* @return geohash of the defined cell
*/
private final static String neighbor(String geohash, int level, int dx, int dy) {
int cell = BASE_32_STRING.indexOf(geohash.charAt(level -1));
// Decoding the Geohash bit pattern to determine grid coordinates
int x0 = cell & 1; // first bit of x
int y0 = cell & 2; // first bit of y
int x1 = cell & 4; // second bit of x
int y1 = cell & 8; // second bit of y
int x2 = cell & 16; // third bit of x
// combine the bitpattern to grid coordinates.
// note that the semantics of x and y are swapping
// on each level
int x = x0 + (x1 / 2) + (x2 / 4);
int y = (y0 / 2) + (y1 / 4);
if (level == 1) {
// Root cells at north (namely "bcfguvyz") or at
// south (namely "0145hjnp") do not have neighbors
// in north/south direction
if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
return null;
} else {
return Character.toString(encode(x + dx, y + dy));
}
} else {
// define grid coordinates for next level
final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);
// if the defined neighbor has the same parent as the current cell
// encode the cell directly. Otherwise find the cell next to this
// cell recursively. Since encoding wraps around within a cell
// it can be encoded here.
// xLimit and yLimit must always be respectively 7 and 3
// since x and y semantics are swapping on each level.
if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
return geohash.substring(0, level - 1) + encode(nx, ny);
} else {
String neighbor = neighbor(geohash, level - 1, dx, dy);
return (neighbor != null) ? neighbor + encode(nx, ny) : neighbor;
}
}
}
/**
* Add all geohashes of the cells next to a given geohash to a list.
*
* @param geohash Geohash of a specified cell
* @param neighbors list to add the neighbors to
* @return the given list
*/
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
return addNeighbors(geohash, geohash.length(), neighbors);
}
/**
* Add all geohashes of the cells next to a given geohash to a list.
*
* @param geohash Geohash of a specified cell
* @param length level of the given geohash
* @param neighbors list to add the neighbors to
* @return the given list
*/
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
String south = neighbor(geohash, length, 0, -1);
String north = neighbor(geohash, length, 0, +1);
if (north != null) {
neighbors.add(neighbor(north, length, -1, 0));
neighbors.add(north);
neighbors.add(neighbor(north, length, +1, 0));
}
neighbors.add(neighbor(geohash, length, -1, 0));
neighbors.add(neighbor(geohash, length, +1, 0));
if (south != null) {
neighbors.add(neighbor(south, length, -1, 0));
neighbors.add(south);
neighbors.add(neighbor(south, length, +1, 0));
}
return neighbors;
}
}
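A hedged usage sketch for the collector variant (geohash literal arbitrary; assumes java.util.SortedSet and java.util.TreeSet are imported):
// Gather the surrounding cells into a sorted set for deterministic iteration.
SortedSet<String> ring = addNeighbors("u4pr", new TreeSet<String>());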

View File

@ -1,383 +0,0 @@
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Reusable geo-spatial projection utility methods.
*
* @lucene.experimental
*/
public class XGeoProjectionUtils {
// WGS84 earth-ellipsoid major (a) minor (b) radius, (f) flattening and eccentricity (e)
static final double SEMIMAJOR_AXIS = 6_378_137; // [m]
static final double FLATTENING = 1.0/298.257223563;
static final double SEMIMINOR_AXIS = SEMIMAJOR_AXIS * (1.0 - FLATTENING); //6_356_752.31420; // [m]
static final double ECCENTRICITY = StrictMath.sqrt((2.0 - FLATTENING) * FLATTENING);
static final double PI_OVER_2 = StrictMath.PI / 2.0D;
static final double SEMIMAJOR_AXIS2 = SEMIMAJOR_AXIS * SEMIMAJOR_AXIS;
static final double SEMIMINOR_AXIS2 = SEMIMINOR_AXIS * SEMIMINOR_AXIS;
/**
* Converts from geocentric earth-centered earth-fixed to geodesic lat/lon/alt
* @param x Cartesian x coordinate
* @param y Cartesian y coordinate
* @param z Cartesian z coordinate
* @param lla 0: longitude 1: latitude 2: altitude
* @return double array as 0: longitude 1: latitude 2: altitude
*/
public static final double[] ecfToLLA(final double x, final double y, final double z, double[] lla) {
boolean atPole = false;
final double ad_c = 1.0026000D;
final double e2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);
final double ep2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMINOR_AXIS2);
final double cos67P5 = 0.38268343236508977D;
if (lla == null) {
lla = new double[3];
}
if (x != 0.0) {
lla[0] = StrictMath.atan2(y,x);
} else {
if (y > 0) {
lla[0] = PI_OVER_2;
} else if (y < 0) {
lla[0] = -PI_OVER_2;
} else {
atPole = true;
lla[0] = 0.0D;
if (z > 0.0) {
lla[1] = PI_OVER_2;
} else if (z < 0.0) {
lla[1] = -PI_OVER_2;
} else {
lla[1] = PI_OVER_2;
lla[2] = -SEMIMINOR_AXIS;
return lla;
}
}
}
final double w2 = x*x + y*y;
final double w = StrictMath.sqrt(w2);
final double t0 = z * ad_c;
final double s0 = StrictMath.sqrt(t0 * t0 + w2);
final double sinB0 = t0 / s0;
final double cosB0 = w / s0;
final double sin3B0 = sinB0 * sinB0 * sinB0;
final double t1 = z + SEMIMINOR_AXIS * ep2 * sin3B0;
final double sum = w - SEMIMAJOR_AXIS * e2 * cosB0 * cosB0 * cosB0;
final double s1 = StrictMath.sqrt(t1 * t1 + sum * sum);
final double sinP1 = t1 / s1;
final double cosP1 = sum / s1;
final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - e2 * sinP1 * sinP1);
if (cosP1 >= cos67P5) {
lla[2] = w / cosP1 - rn;
} else if (cosP1 <= -cos67P5) {
lla[2] = w / -cosP1 - rn;
} else {
lla[2] = z / sinP1 + rn * (e2 - 1.0);
}
if (!atPole) {
lla[1] = StrictMath.atan(sinP1/cosP1);
}
lla[0] = StrictMath.toDegrees(lla[0]);
lla[1] = StrictMath.toDegrees(lla[1]);
return lla;
}
/**
* Converts from geodesic lon lat alt to geocentric earth-centered earth-fixed
* @param lon geodesic longitude
* @param lat geodesic latitude
* @param alt geodesic altitude
* @param ecf reusable earth-centered earth-fixed result
* @return either a new ecef array or the reusable ecf parameter
*/
public static final double[] llaToECF(double lon, double lat, double alt, double[] ecf) {
lon = StrictMath.toRadians(lon);
lat = StrictMath.toRadians(lat);
final double sl = StrictMath.sin(lat);
final double s2 = sl*sl;
final double cl = StrictMath.cos(lat);
final double ge2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);
if (ecf == null) {
ecf = new double[3];
}
if (lat < -PI_OVER_2 && lat > -1.001D * PI_OVER_2) {
lat = -PI_OVER_2;
} else if (lat > PI_OVER_2 && lat < 1.001D * PI_OVER_2) {
lat = PI_OVER_2;
}
assert (lat >= -PI_OVER_2) && (lat <= PI_OVER_2);
if (lon > StrictMath.PI) {
lon -= (2*StrictMath.PI);
}
final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - ge2 * s2);
ecf[0] = (rn+alt) * cl * StrictMath.cos(lon);
ecf[1] = (rn+alt) * cl * StrictMath.sin(lon);
ecf[2] = ((rn*(1.0-ge2))+alt)*sl;
return ecf;
}
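A round-trip sketch under the conventions above (lon/lat in degrees, alt in meters; values illustrative):
double[] ecf = XGeoProjectionUtils.llaToECF(-122.0, 37.0, 15.0, null);
double[] lla = XGeoProjectionUtils.ecfToLLA(ecf[0], ecf[1], ecf[2], null);
// lla ~ {-122.0, 37.0, 15.0} within floating-point tolerance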
/**
* Converts from lon/lat (in degrees) and altitude (in meters) to an East North Up right-hand coordinate system
* @param lon longitude in degrees
* @param lat latitude in degrees
* @param alt altitude in meters
* @param centerLon reference point longitude in degrees
* @param centerLat reference point latitude in degrees
* @param centerAlt reference point altitude in meters
* @param enu result east, north, up coordinate
* @return east, north, up coordinate
*/
public static double[] llaToENU(final double lon, final double lat, final double alt, double centerLon,
double centerLat, final double centerAlt, double[] enu) {
if (enu == null) {
enu = new double[3];
}
// convert point to ecf coordinates
final double[] ecf = llaToECF(lon, lat, alt, null);
// convert from ecf to enu
return ecfToENU(ecf[0], ecf[1], ecf[2], centerLon, centerLat, centerAlt, enu);
}
/**
* Converts from an East North Up right-hand system to lon/lat (in degrees) and altitude (in meters)
* @param x easting (in meters)
* @param y northing (in meters)
* @param z up (in meters)
* @param centerLon reference point longitude (in degrees)
* @param centerLat reference point latitude (in degrees)
* @param centerAlt reference point altitude (in meters)
* @param lla resulting lon, lat, alt point (lon/lat in degrees, alt in meters)
* @return lon, lat, alt point (lon/lat in degrees, alt in meters)
*/
public static double[] enuToLLA(final double x, final double y, final double z, final double centerLon,
final double centerLat, final double centerAlt, double[] lla) {
// convert enuToECF
if (lla == null) {
lla = new double[3];
}
// convert enuToECF, storing intermediate result in lla
lla = enuToECF(x, y, z, centerLon, centerLat, centerAlt, lla);
// convert ecf to LLA
return ecfToLLA(lla[0], lla[1], lla[2], lla);
}
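A hedged round-trip sketch relative to a reference origin (values illustrative):
double[] enu = XGeoProjectionUtils.llaToENU(-122.001, 37.001, 0.0, -122.0, 37.0, 0.0, null);
double[] back = XGeoProjectionUtils.enuToLLA(enu[0], enu[1], enu[2], -122.0, 37.0, 0.0, null);
// back ~ {-122.001, 37.001, 0.0}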
/**
* Convert from Earth-Centered-Fixed to Easting, Northing, Up Right Hand System
* @param x ECF X coordinate (in meters)
* @param y ECF Y coordinate (in meters)
* @param z ECF Z coordinate (in meters)
* @param centerLon ENU origin longitude (in degrees)
* @param centerLat ENU origin latitude (in degrees)
* @param centerAlt ENU origin altitude (in meters)
* @param enu reusable enu result
* @return Easting, Northing, Up coordinate
*/
public static double[] ecfToENU(double x, double y, double z, final double centerLon,
final double centerLat, final double centerAlt, double[] enu) {
if (enu == null) {
enu = new double[3];
}
// create rotation matrix and rotate to enu orientation
final double[][] phi = createPhiTransform(centerLon, centerLat, null);
// convert origin to ENU
final double[] originECF = llaToECF(centerLon, centerLat, centerAlt, null);
final double[] originENU = new double[3];
originENU[0] = ((phi[0][0] * originECF[0]) + (phi[0][1] * originECF[1]) + (phi[0][2] * originECF[2]));
originENU[1] = ((phi[1][0] * originECF[0]) + (phi[1][1] * originECF[1]) + (phi[1][2] * originECF[2]));
originENU[2] = ((phi[2][0] * originECF[0]) + (phi[2][1] * originECF[1]) + (phi[2][2] * originECF[2]));
// rotate then translate
enu[0] = ((phi[0][0] * x) + (phi[0][1] * y) + (phi[0][2] * z)) - originENU[0];
enu[1] = ((phi[1][0] * x) + (phi[1][1] * y) + (phi[1][2] * z)) - originENU[1];
enu[2] = ((phi[2][0] * x) + (phi[2][1] * y) + (phi[2][2] * z)) - originENU[2];
return enu;
}
/**
* Convert from Easting, Northing, Up Right-Handed system to Earth Centered Fixed system
* @param x ENU x coordinate (in meters)
* @param y ENU y coordinate (in meters)
* @param z ENU z coordinate (in meters)
* @param centerLon ENU origin longitude (in degrees)
* @param centerLat ENU origin latitude (in degrees)
* @param centerAlt ENU origin altitude (in meters)
* @param ecf reusable ecf result
* @return ecf result coordinate
*/
public static double[] enuToECF(final double x, final double y, final double z, double centerLon,
double centerLat, final double centerAlt, double[] ecf) {
if (ecf == null) {
ecf = new double[3];
}
double[][] phi = createTransposedPhiTransform(centerLon, centerLat, null);
double[] ecfOrigin = llaToECF(centerLon, centerLat, centerAlt, null);
// rotate and translate
ecf[0] = (phi[0][0]*x + phi[0][1]*y + phi[0][2]*z) + ecfOrigin[0];
ecf[1] = (phi[1][0]*x + phi[1][1]*y + phi[1][2]*z) + ecfOrigin[1];
ecf[2] = (phi[2][0]*x + phi[2][1]*y + phi[2][2]*z) + ecfOrigin[2];
return ecf;
}
/**
* Create the rotation matrix for converting Earth Centered Fixed to Easting Northing Up
* @param originLon ENU origin longitude (in degrees)
* @param originLat ENU origin latitude (in degrees)
* @param phiMatrix reusable phi matrix result
* @return phi rotation matrix
*/
private static double[][] createPhiTransform(double originLon, double originLat, double[][] phiMatrix) {
if (phiMatrix == null) {
phiMatrix = new double[3][3];
}
originLon = StrictMath.toRadians(originLon);
originLat = StrictMath.toRadians(originLat);
final double sLon = StrictMath.sin(originLon);
final double cLon = StrictMath.cos(originLon);
final double sLat = StrictMath.sin(originLat);
final double cLat = StrictMath.cos(originLat);
phiMatrix[0][0] = -sLon;
phiMatrix[0][1] = cLon;
phiMatrix[0][2] = 0.0D;
phiMatrix[1][0] = -sLat * cLon;
phiMatrix[1][1] = -sLat * sLon;
phiMatrix[1][2] = cLat;
phiMatrix[2][0] = cLat * cLon;
phiMatrix[2][1] = cLat * sLon;
phiMatrix[2][2] = sLat;
return phiMatrix;
}
/**
* Create the transposed rotation matrix for converting Easting Northing Up coordinates to Earth Centered Fixed
* @param originLon ENU origin longitude (in degrees)
* @param originLat ENU origin latitude (in degrees)
* @param phiMatrix reusable phi rotation matrix result
* @return transposed phi rotation matrix
*/
private static double[][] createTransposedPhiTransform(double originLon, double originLat, double[][] phiMatrix) {
if (phiMatrix == null) {
phiMatrix = new double[3][3];
}
originLon = StrictMath.toRadians(originLon);
originLat = StrictMath.toRadians(originLat);
final double sLat = StrictMath.sin(originLat);
final double cLat = StrictMath.cos(originLat);
final double sLon = StrictMath.sin(originLon);
final double cLon = StrictMath.cos(originLon);
phiMatrix[0][0] = -sLon;
phiMatrix[1][0] = cLon;
phiMatrix[2][0] = 0.0D;
phiMatrix[0][1] = -sLat * cLon;
phiMatrix[1][1] = -sLat * sLon;
phiMatrix[2][1] = cLat;
phiMatrix[0][2] = cLat * cLon;
phiMatrix[1][2] = cLat * sLon;
phiMatrix[2][2] = sLat;
return phiMatrix;
}
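Since phi is a rotation matrix, its transpose is its inverse; a hedged sanity sketch (both helpers are private, so this only illustrates the relationship as if run inside the class):
double[][] phi  = createPhiTransform(-122.0, 37.0, null);
double[][] phiT = createTransposedPhiTransform(-122.0, 37.0, null);
// row 0 of phi dotted with column 0 of phiT is ~1.0; mixed rows/columns dot to ~0.0
double diag = phi[0][0]*phiT[0][0] + phi[0][1]*phiT[1][0] + phi[0][2]*phiT[2][0];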
/**
* Finds a point along a bearing from a given lon,lat geolocation using Vincenty's distance formula
*
* @param lon origin longitude in degrees
* @param lat origin latitude in degrees
* @param bearing azimuthal bearing in degrees
* @param dist distance in meters
* @param pt resulting point
* @return the point along a bearing at a given distance in meters
*/
public static final double[] pointFromLonLatBearing(double lon, double lat, double bearing, double dist, double[] pt) {
if (pt == null) {
pt = new double[2];
}
final double alpha1 = StrictMath.toRadians(bearing);
final double cosA1 = StrictMath.cos(alpha1);
final double sinA1 = StrictMath.sin(alpha1);
final double tanU1 = (1-FLATTENING) * StrictMath.tan(StrictMath.toRadians(lat));
final double cosU1 = 1 / StrictMath.sqrt((1+tanU1*tanU1));
final double sinU1 = tanU1*cosU1;
final double sig1 = StrictMath.atan2(tanU1, cosA1);
final double sinAlpha = cosU1 * sinA1;
final double cosSqAlpha = 1 - sinAlpha*sinAlpha;
final double uSq = cosSqAlpha * (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2) / SEMIMINOR_AXIS2;
final double A = 1 + uSq/16384D*(4096D + uSq * (-768D + uSq * (320D - 175D*uSq)));
final double B = uSq/1024D * (256D + uSq * (-128D + uSq * (74D - 47D * uSq)));
double sigma = dist / (SEMIMINOR_AXIS*A);
double sigmaP;
double sinSigma, cosSigma, cos2SigmaM, deltaSigma;
do {
cos2SigmaM = StrictMath.cos(2*sig1 + sigma);
sinSigma = StrictMath.sin(sigma);
cosSigma = StrictMath.cos(sigma);
deltaSigma = B * sinSigma * (cos2SigmaM + (B/4D) * (cosSigma*(-1+2*cos2SigmaM*cos2SigmaM)-
(B/6) * cos2SigmaM*(-3+4*sinSigma*sinSigma)*(-3+4*cos2SigmaM*cos2SigmaM)));
sigmaP = sigma;
sigma = dist / (SEMIMINOR_AXIS*A) + deltaSigma;
} while (StrictMath.abs(sigma-sigmaP) > 1E-12);
final double tmp = sinU1*sinSigma - cosU1*cosSigma*cosA1;
final double lat2 = StrictMath.atan2(sinU1*cosSigma + cosU1*sinSigma*cosA1,
(1-FLATTENING) * StrictMath.sqrt(sinAlpha*sinAlpha + tmp*tmp));
final double lambda = StrictMath.atan2(sinSigma*sinA1, cosU1*cosSigma - sinU1*sinSigma*cosA1);
final double c = FLATTENING/16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha));
final double lam = lambda - (1-c) * FLATTENING * sinAlpha *
(sigma + c * sinSigma * (cos2SigmaM + c * cosSigma * (-1 + 2* cos2SigmaM*cos2SigmaM)));
pt[0] = lon + StrictMath.toDegrees(lam);
pt[1] = StrictMath.toDegrees(lat2);
return pt;
}
}
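A hedged usage sketch of the Vincenty direct solution above (values illustrative):
// Walk roughly 1 km due east (bearing 90 degrees) from a starting point.
double[] pt = XGeoProjectionUtils.pointFromLonLatBearing(-122.0, 37.0, 90.0, 1000.0, null);
// pt[0] = destination longitude, pt[1] = destination latitude (degrees)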

View File

@ -1,429 +0,0 @@
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
/**
* Basic reusable geo-spatial utility methods
*
* @lucene.experimental
*/
public final class XGeoUtils {
private static final short MIN_LON = -180;
private static final short MIN_LAT = -90;
public static final short BITS = 31;
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
public static final double TOLERANCE = 1E-5;
/** Minimum longitude value. */
public static final double MIN_LON_INCL = -180.0D;
/** Maximum longitude value. */
public static final double MAX_LON_INCL = 180.0D;
/** Minimum latitude value. */
public static final double MIN_LAT_INCL = -90.0D;
/** Maximum latitude value. */
public static final double MAX_LAT_INCL = 90.0D;
// magic numbers for bit interleaving
private static final long MAGIC[] = {
0x5555555555555555L, 0x3333333333333333L,
0x0F0F0F0F0F0F0F0FL, 0x00FF00FF00FF00FFL,
0x0000FFFF0000FFFFL, 0x00000000FFFFFFFFL,
0xAAAAAAAAAAAAAAAAL
};
// shift values for bit interleaving
private static final short SHIFT[] = {1, 2, 4, 8, 16};
public static double LOG2 = StrictMath.log(2);
// No instance:
private XGeoUtils() {
}
public static Long mortonHash(final double lon, final double lat) {
return interleave(scaleLon(lon), scaleLat(lat));
}
public static double mortonUnhashLon(final long hash) {
return unscaleLon(deinterleave(hash));
}
public static double mortonUnhashLat(final long hash) {
return unscaleLat(deinterleave(hash >>> 1));
}
private static long scaleLon(final double val) {
return (long) ((val-MIN_LON) * LON_SCALE);
}
private static long scaleLat(final double val) {
return (long) ((val-MIN_LAT) * LAT_SCALE);
}
private static double unscaleLon(final long val) {
return (val / LON_SCALE) + MIN_LON;
}
private static double unscaleLat(final long val) {
return (val / LAT_SCALE) + MIN_LAT;
}
/**
* Interleaves the first 32 bits of each long value
*
* Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
*/
public static long interleave(long v1, long v2) {
v1 = (v1 | (v1 << SHIFT[4])) & MAGIC[4];
v1 = (v1 | (v1 << SHIFT[3])) & MAGIC[3];
v1 = (v1 | (v1 << SHIFT[2])) & MAGIC[2];
v1 = (v1 | (v1 << SHIFT[1])) & MAGIC[1];
v1 = (v1 | (v1 << SHIFT[0])) & MAGIC[0];
v2 = (v2 | (v2 << SHIFT[4])) & MAGIC[4];
v2 = (v2 | (v2 << SHIFT[3])) & MAGIC[3];
v2 = (v2 | (v2 << SHIFT[2])) & MAGIC[2];
v2 = (v2 | (v2 << SHIFT[1])) & MAGIC[1];
v2 = (v2 | (v2 << SHIFT[0])) & MAGIC[0];
return (v2<<1) | v1;
}
/**
* Deinterleaves a long value back to two concatenated 32-bit values
*/
public static long deinterleave(long b) {
b &= MAGIC[0];
b = (b ^ (b >>> SHIFT[0])) & MAGIC[1];
b = (b ^ (b >>> SHIFT[1])) & MAGIC[2];
b = (b ^ (b >>> SHIFT[2])) & MAGIC[3];
b = (b ^ (b >>> SHIFT[3])) & MAGIC[4];
b = (b ^ (b >>> SHIFT[4])) & MAGIC[5];
return b;
}
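A hedged round-trip sketch: interleave places v1 on the even bits and v2 on the odd bits, so deinterleaving the hash and its one-bit shift recovers both inputs.
long morton = XGeoUtils.interleave(0x12345678L, 0x0FEDCBA9L);
long even = XGeoUtils.deinterleave(morton);        // recovers 0x12345678 (v1, even bits)
long odd  = XGeoUtils.deinterleave(morton >>> 1);  // recovers 0x0FEDCBA9 (v2, odd bits)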
public static double compare(final double v1, final double v2) {
final double compare = v1-v2;
return Math.abs(compare) <= TOLERANCE ? 0 : compare;
}
/**
* Puts longitude in range of -180 to +180.
*/
public static double normalizeLon(double lon_deg) {
if (lon_deg >= -180 && lon_deg <= 180) {
return lon_deg; //common case, and avoids slight double precision shifting
}
double off = (lon_deg + 180) % 360;
if (off < 0) {
return 180 + off;
} else if (off == 0 && lon_deg > 0) {
return 180;
} else {
return -180 + off;
}
}
/**
* Puts latitude in range of -90 to 90.
*/
public static double normalizeLat(double lat_deg) {
if (lat_deg >= -90 && lat_deg <= 90) {
return lat_deg; //common case, and avoids slight double precision shifting
}
double off = Math.abs((lat_deg + 90) % 360);
return (off <= 180 ? off : 360-off) - 90;
}
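Worked examples of the wrapping behavior above (values follow directly from the modular arithmetic):
// normalizeLon(190.0)  -> -170.0     normalizeLon(-190.0) ->  170.0
// normalizeLat(91.0)   ->   89.0     normalizeLat(-91.0)  ->  -89.0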
public static final boolean bboxContains(final double lon, final double lat, final double minLon,
final double minLat, final double maxLon, final double maxLat) {
return (compare(lon, minLon) >= 0 && compare(lon, maxLon) <= 0
&& compare(lat, minLat) >= 0 && compare(lat, maxLat) <= 0);
}
/**
* simple even-odd point in polygon computation
* 1. Determine if point is contained in the longitudinal range
* 2. Determine whether point crosses the edge by computing the latitudinal delta
* between the end-point of a parallel vector (originating at the point) and the
* y-component of the edge sink
*
* NOTE: Requires polygon point (x,y) order either clockwise or counter-clockwise
*/
public static boolean pointInPolygon(double[] x, double[] y, double lat, double lon) {
assert x.length == y.length;
boolean inPoly = false;
/**
* Note: This is using a Euclidean coordinate system which could result in
* upwards of 110km of error at the equator.
* TODO convert coordinates to cylindrical projection (e.g. mercator)
*/
for (int i = 1; i < x.length; i++) {
if (x[i] < lon && x[i-1] >= lon || x[i-1] < lon && x[i] >= lon) {
if (y[i] + (lon - x[i]) / (x[i-1] - x[i]) * (y[i-1] - y[i]) < lat) {
inPoly = !inPoly;
}
}
}
return inPoly;
}
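A hedged sketch on a closed unit square (note the ring repeats its first point, and the argument order is lat, lon):
double[] px = {0.0, 1.0, 1.0, 0.0, 0.0};
double[] py = {0.0, 0.0, 1.0, 1.0, 0.0};
boolean inside  = XGeoUtils.pointInPolygon(px, py, 0.5, 0.5); // true
boolean outside = XGeoUtils.pointInPolygon(px, py, 0.5, 1.5); // false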
public static String geoTermToString(long term) {
StringBuilder s = new StringBuilder(64);
final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
for (int i = 0; i < numberOfLeadingZeros; i++) {
s.append('0');
}
if (term != 0) {
s.append(Long.toBinaryString(term));
}
return s.toString();
}
public static boolean rectDisjoint(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
}
/**
* Computes whether a rectangle is wholly within another rectangle (shared boundaries allowed)
*/
public static boolean rectWithin(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY);
}
public static boolean rectCrosses(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !(rectDisjoint(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY) ||
rectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY));
}
/**
* Computes whether rectangle a contains rectangle b (touching allowed)
*/
public static boolean rectContains(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !(bMinX < aMinX || bMinY < aMinY || bMaxX > aMaxX || bMaxY > aMaxY);
}
/**
* Computes whether a rectangle intersects another rectangle (crosses, within, touching, etc)
*/
public static boolean rectIntersects(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !((aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) );
}
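A hedged sketch of how the predicates differ on touching versus separated unit squares:
boolean touch = XGeoUtils.rectIntersects(0, 0, 1, 1, 1, 0, 2, 1); // true (shared edge counts)
boolean apart = XGeoUtils.rectDisjoint(0, 0, 1, 1, 2, 0, 3, 1);   // true (no overlap at all)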
/**
* Computes whether a rectangle crosses a shape. (touching not allowed)
*/
public static boolean rectCrossesPoly(final double rMinX, final double rMinY, final double rMaxX,
final double rMaxY, final double[] shapeX, final double[] shapeY,
final double sMinX, final double sMinY, final double sMaxX,
final double sMaxY) {
// short-circuit: if the bounding boxes are disjoint then the shape does not cross
if (rectDisjoint(rMinX, rMinY, rMaxX, rMaxY, sMinX, sMinY, sMaxX, sMaxY)) {
return false;
}
final double[][] bbox = new double[][] { {rMinX, rMinY}, {rMaxX, rMinY}, {rMaxX, rMaxY}, {rMinX, rMaxY}, {rMinX, rMinY} };
final int polyLength = shapeX.length-1;
double d, s, t, a1, b1, c1, a2, b2, c2;
double x00, y00, x01, y01, x10, y10, x11, y11;
// computes the intersection point between each bbox edge and the polygon edge
for (short b=0; b<4; ++b) {
a1 = bbox[b+1][1]-bbox[b][1];
b1 = bbox[b][0]-bbox[b+1][0];
c1 = a1*bbox[b+1][0] + b1*bbox[b+1][1];
for (int p=0; p<polyLength; ++p) {
a2 = shapeY[p+1]-shapeY[p];
b2 = shapeX[p]-shapeX[p+1];
// compute determinant
d = a1*b2 - a2*b1;
if (d != 0) {
// lines are not parallel, check intersecting points
c2 = a2*shapeX[p+1] + b2*shapeY[p+1];
s = (1/d)*(b2*c1 - b1*c2);
t = (1/d)*(a1*c2 - a2*c1);
x00 = StrictMath.min(bbox[b][0], bbox[b+1][0]) - TOLERANCE;
x01 = StrictMath.max(bbox[b][0], bbox[b+1][0]) + TOLERANCE;
y00 = StrictMath.min(bbox[b][1], bbox[b+1][1]) - TOLERANCE;
y01 = StrictMath.max(bbox[b][1], bbox[b+1][1]) + TOLERANCE;
x10 = StrictMath.min(shapeX[p], shapeX[p+1]) - TOLERANCE;
x11 = StrictMath.max(shapeX[p], shapeX[p+1]) + TOLERANCE;
y10 = StrictMath.min(shapeY[p], shapeY[p+1]) - TOLERANCE;
y11 = StrictMath.max(shapeY[p], shapeY[p+1]) + TOLERANCE;
// check whether the intersection point is touching one of the line segments
boolean touching = ((x00 == s && y00 == t) || (x01 == s && y01 == t))
|| ((x10 == s && y10 == t) || (x11 == s && y11 == t));
// if line segments are not touching and the intersection point is within the range of either segment
if (!(touching || x00 > s || x01 < s || y00 > t || y01 < t || x10 > s || x11 < s || y10 > t || y11 < t)) {
return true;
}
}
} // for each poly edge
} // for each bbox edge
return false;
}
/**
* Converts a given circle (defined as a point/radius) to an approximated line-segment polygon
*
* @param lon longitudinal center of circle (in degrees)
* @param lat latitudinal center of circle (in degrees)
* @param radius distance radius of circle (in meters)
* @return a list of lon/lat points representing the circle
*/
@SuppressWarnings({"unchecked","rawtypes"})
public static ArrayList<double[]> circleToPoly(final double lon, final double lat, final double radius) {
double angle;
// a little under-sampling (to limit the number of polygonal points): using Archimedes' estimation of pi
final int sides = 25;
ArrayList<double[]> geometry = new ArrayList();
double[] lons = new double[sides];
double[] lats = new double[sides];
double[] pt = new double[2];
final int sidesLen = sides-1;
for (int i=0; i<sidesLen; ++i) {
angle = (i*360/sides);
pt = XGeoProjectionUtils.pointFromLonLatBearing(lon, lat, angle, radius, pt);
lons[i] = pt[0];
lats[i] = pt[1];
}
// close the poly
lons[sidesLen] = lons[0];
lats[sidesLen] = lats[0];
geometry.add(lons);
geometry.add(lats);
return geometry;
}
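A hedged usage sketch (center and radius are illustrative):
// Approximate a 500 m circle as a closed 25-point ring.
ArrayList<double[]> poly = XGeoUtils.circleToPoly(-122.0, 37.0, 500.0);
double[] lons = poly.get(0); // ring longitudes; first == last
double[] lats = poly.get(1); // ring latitudes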
/**
* Computes whether a rectangle is within a given polygon (shared boundaries allowed)
*/
public static boolean rectWithinPoly(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double[] shapeX, final double[] shapeY, final double sMinX,
final double sMinY, final double sMaxX, final double sMaxY) {
// check if rectangle crosses poly (to handle concave/pacman polys), then check that all 4 corners
// are contained
return !(rectCrossesPoly(rMinX, rMinY, rMaxX, rMaxY, shapeX, shapeY, sMinX, sMinY, sMaxX, sMaxY) ||
!pointInPolygon(shapeX, shapeY, rMinY, rMinX) || !pointInPolygon(shapeX, shapeY, rMinY, rMaxX) ||
!pointInPolygon(shapeX, shapeY, rMaxY, rMaxX) || !pointInPolygon(shapeX, shapeY, rMaxY, rMinX));
}
private static boolean rectAnyCornersOutsideCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 > radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 > radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 > radius
|| SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 > radius);
}
private static boolean rectAnyCornersInCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 <= radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 <= radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 <= radius
|| SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 <= radius);
}
public static boolean rectWithinCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return !(rectAnyCornersOutsideCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius));
}
/**
* Computes whether a rectangle crosses a circle
*/
public static boolean rectCrossesCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return rectAnyCornersInCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius)
|| lineCrossesSphere(rMinX, rMinY, 0, rMaxX, rMinY, 0, centerLon, centerLat, 0, radius)
|| lineCrossesSphere(rMaxX, rMinY, 0, rMaxX, rMaxY, 0, centerLon, centerLat, 0, radius)
|| lineCrossesSphere(rMaxX, rMaxY, 0, rMinX, rMaxY, 0, centerLon, centerLat, 0, radius)
|| lineCrossesSphere(rMinX, rMaxY, 0, rMinX, rMinY, 0, centerLon, centerLat, 0, radius);
}
/**
* Computes whether a 3-dimensional line segment intersects or crosses a sphere
*
* @param lon1 longitudinal location of the line segment start point (in degrees)
* @param lat1 latitudinal location of the line segment start point (in degrees)
* @param alt1 altitude of the line segment start point (in meters)
* @param lon2 longitudinal location of the line segment end point (in degrees)
* @param lat2 latitudinal location of the line segment end point (in degrees)
* @param alt2 altitude of the line segment end point (in meters)
* @param centerLon longitudinal location of center search point (in degrees)
* @param centerLat latitudinal location of center search point (in degrees)
* @param centerAlt altitude of the center point (in meters)
* @param radius search sphere radius (in meters)
* @return whether the provided line segment is a secant of the sphere
*/
private static boolean lineCrossesSphere(double lon1, double lat1, double alt1, double lon2,
double lat2, double alt2, double centerLon, double centerLat,
double centerAlt, double radius) {
// convert to cartesian 3d (in meters)
double[] ecf1 = XGeoProjectionUtils.llaToECF(lon1, lat1, alt1, null);
double[] ecf2 = XGeoProjectionUtils.llaToECF(lon2, lat2, alt2, null);
double[] cntr = XGeoProjectionUtils.llaToECF(centerLon, centerLat, centerAlt, null);
final double dX = ecf2[0] - ecf1[0];
final double dY = ecf2[1] - ecf1[1];
final double dZ = ecf2[2] - ecf1[2];
final double fX = ecf1[0] - cntr[0];
final double fY = ecf1[1] - cntr[1];
final double fZ = ecf1[2] - cntr[2];
final double a = dX*dX + dY*dY + dZ*dZ;
final double b = 2 * (fX*dX + fY*dY + fZ*dZ);
final double c = (fX*fX + fY*fY + fZ*fZ) - (radius*radius);
double discrim = (b*b)-(4*a*c);
if (discrim < 0) {
return false;
}
discrim = StrictMath.sqrt(discrim);
final double a2 = 2*a;
final double t1 = (-b - discrim)/a2;
final double t2 = (-b + discrim)/a2;
if ( (t1 < 0 || t1 > 1) ) {
return !(t2 < 0 || t2 > 1);
}
return true;
}
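A note on the math above, matching the coefficients in the code:
// The segment is parameterized as P(t) = P1 + t*(P2 - P1), t in [0, 1], and
// substituted into |P(t) - C|^2 = r^2, giving the quadratic
//   a*t^2 + b*t + c = 0   with a = d.d, b = 2*(f.d), c = f.f - r^2,
// where d = P2 - P1 and f = P1 - C. A real root with 0 <= t <= 1 means the
// segment meets the sphere; a negative discriminant means it misses entirely.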
public static boolean isValidLat(double lat) {
return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
}
public static boolean isValidLon(double lon) {
return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
}
}

View File

@ -19,81 +19,101 @@
package org.elasticsearch;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
/**
* Information about a build of Elasticsearch.
*/
public class Build {
/**
* The current build of Elasticsearch. Filled with information scanned at
* startup from the jar.
*/
public static final Build CURRENT;
static {
String hash = "NA";
String hashShort = "NA";
String timestamp = "NA";
final String shortHash;
final String date;
try (InputStream is = Build.class.getResourceAsStream("/es-build.properties")){
Properties props = new Properties();
props.load(is);
hash = props.getProperty("hash", hash);
if (!hash.equals("NA")) {
hashShort = hash.substring(0, 7);
Path path = getElasticsearchCodebase();
if (path.toString().endsWith(".jar")) {
try (JarInputStream jar = new JarInputStream(Files.newInputStream(path))) {
Manifest manifest = jar.getManifest();
shortHash = manifest.getMainAttributes().getValue("Change");
date = manifest.getMainAttributes().getValue("Build-Date");
} catch (IOException e) {
throw new RuntimeException(e);
}
String gitTimestampRaw = props.getProperty("timestamp");
if (gitTimestampRaw != null) {
timestamp = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC).print(Long.parseLong(gitTimestampRaw));
}
} catch (Exception e) {
// just ignore...
} else {
// not running from a jar (unit tests, IDE)
shortHash = "Unknown";
date = "Unknown";
}
if (shortHash == null) {
throw new IllegalStateException("Error finding the build shortHash. " +
"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
}
if (date == null) {
throw new IllegalStateException("Error finding the build date. " +
"Stopping Elasticsearch now so it doesn't run in subtly broken ways. This is likely a build bug.");
}
CURRENT = new Build(hash, hashShort, timestamp);
CURRENT = new Build(shortHash, date);
}
private String hash;
private String hashShort;
private String timestamp;
Build(String hash, String hashShort, String timestamp) {
this.hash = hash;
this.hashShort = hashShort;
this.timestamp = timestamp;
/**
* Returns the path to the elasticsearch codebase
*/
@SuppressForbidden(reason = "looks up path of elasticsearch.jar directly")
static Path getElasticsearchCodebase() {
URL url = Build.class.getProtectionDomain().getCodeSource().getLocation();
try {
return PathUtils.get(url.toURI());
} catch (URISyntaxException bogus) {
throw new RuntimeException(bogus);
}
}
public String hash() {
return hash;
private String shortHash;
private String date;
Build(String shortHash, String date) {
this.shortHash = shortHash;
this.date = date;
}
public String hashShort() {
return hashShort;
public String shortHash() {
return shortHash;
}
public String timestamp() {
return timestamp;
public String date() {
return date;
}
public static Build readBuild(StreamInput in) throws IOException {
String hash = in.readString();
String hashShort = in.readString();
String timestamp = in.readString();
return new Build(hash, hashShort, timestamp);
String date = in.readString();
return new Build(hash, date);
}
public static void writeBuild(Build build, StreamOutput out) throws IOException {
out.writeString(build.hash());
out.writeString(build.hashShort());
out.writeString(build.timestamp());
out.writeString(build.shortHash());
out.writeString(build.date());
}
@Override
public String toString() {
return "[" + hash + "][" + timestamp + "]";
return "[" + shortHash + "][" + date + "]";
}
}

View File

@ -42,15 +42,6 @@ public class ElasticsearchCorruptionException extends IOException {
* @param ex the exception cause
*/
public ElasticsearchCorruptionException(Throwable ex) {
this(ex.getMessage());
if (ex != null) {
this.setStackTrace(ex.getStackTrace());
}
Throwable[] suppressed = ex.getSuppressed();
if (suppressed != null) {
for (Throwable supressedExc : suppressed) {
addSuppressed(supressedExc);
}
}
super(ex);
}
}

View File

@ -51,6 +51,13 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
private static final Map<Class<? extends ElasticsearchException>, ElasticsearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE;
private final Map<String, List<String>> headers = new HashMap<>();
/**
* Construct an <code>ElasticsearchException</code> with the specified cause exception.
*/
public ElasticsearchException(Throwable cause) {
super(cause);
}
/**
* Construct an <code>ElasticsearchException</code> with the specified detail message.
*
@ -547,7 +554,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
NODE_DISCONNECTED_EXCEPTION(org.elasticsearch.transport.NodeDisconnectedException.class, org.elasticsearch.transport.NodeDisconnectedException::new, 84),
ALREADY_EXPIRED_EXCEPTION(org.elasticsearch.index.AlreadyExpiredException.class, org.elasticsearch.index.AlreadyExpiredException::new, 85),
AGGREGATION_EXECUTION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationExecutionException.class, org.elasticsearch.search.aggregations.AggregationExecutionException::new, 86),
MERGE_MAPPING_EXCEPTION(org.elasticsearch.index.mapper.MergeMappingException.class, org.elasticsearch.index.mapper.MergeMappingException::new, 87),
// 87 used to be for MergeMappingException
INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class, org.elasticsearch.indices.InvalidIndexTemplateException::new, 88),
PERCOLATE_EXCEPTION(org.elasticsearch.percolator.PercolateException.class, org.elasticsearch.percolator.PercolateException::new, 89),
REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class, org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90),

View File

@ -33,8 +33,12 @@ public class ElasticsearchTimeoutException extends ElasticsearchException {
super(in);
}
public ElasticsearchTimeoutException(Throwable cause) {
super(cause);
}
public ElasticsearchTimeoutException(String message, Object... args) {
super(message);
super(message, args);
}
public ElasticsearchTimeoutException(String message, Throwable cause, Object... args) {

View File

@ -47,14 +47,14 @@ public final class ExceptionsHelper {
if (t instanceof RuntimeException) {
return (RuntimeException) t;
}
return new ElasticsearchException(t.getMessage(), t);
return new ElasticsearchException(t);
}
public static ElasticsearchException convertToElastic(Throwable t) {
if (t instanceof ElasticsearchException) {
return (ElasticsearchException) t;
}
return new ElasticsearchException(t.getMessage(), t);
return new ElasticsearchException(t);
}
public static RestStatus status(Throwable t) {
@ -160,7 +160,7 @@ public final class ExceptionsHelper {
main = useOrSuppress(main, ex);
}
if (main != null) {
throw new ElasticsearchException(main.getMessage(), main);
throw new ElasticsearchException(main);
}
}

View File

@ -253,7 +253,9 @@ public class Version {
public static final int V_1_7_2_ID = 1070299;
public static final Version V_1_7_2 = new Version(V_1_7_2_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_3_ID = 1070399;
public static final Version V_1_7_3 = new Version(V_1_7_3_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final Version V_1_7_3 = new Version(V_1_7_3_ID, false, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_4_ID = 1070499;
public static final Version V_1_7_4 = new Version(V_1_7_4_ID, true, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_2_0_0_beta1_ID = 2000001;
public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
@ -262,13 +264,19 @@ public class Version {
public static final int V_2_0_0_rc1_ID = 2000051;
public static final Version V_2_0_0_rc1 = new Version(V_2_0_0_rc1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_0_ID = 2000099;
public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final Version V_2_0_0 = new Version(V_2_0_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_1_ID = 2000199;
public static final Version V_2_0_1 = new Version(V_2_0_1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_2_ID = 2000299;
public static final Version V_2_0_2 = new Version(V_2_0_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_1_0_ID = 2010099;
public static final Version V_2_1_0 = new Version(V_2_1_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
public static final Version V_2_1_0 = new Version(V_2_1_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_1_1_ID = 2010199;
public static final Version V_2_1_1 = new Version(V_2_1_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_2_0_ID = 2020099;
public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
public static final int V_3_0_0_ID = 3000099;
public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final Version CURRENT = V_3_0_0;
static {
@ -285,8 +293,14 @@ public class Version {
return V_3_0_0;
case V_2_2_0_ID:
return V_2_2_0;
case V_2_1_1_ID:
return V_2_1_1;
case V_2_1_0_ID:
return V_2_1_0;
case V_2_0_2_ID:
return V_2_0_2;
case V_2_0_1_ID:
return V_2_0_1;
case V_2_0_0_ID:
return V_2_0_0;
case V_2_0_0_rc1_ID:
@ -295,6 +309,8 @@ public class Version {
return V_2_0_0_beta2;
case V_2_0_0_beta1_ID:
return V_2_0_0_beta1;
case V_1_7_4_ID:
return V_1_7_4;
case V_1_7_3_ID:
return V_1_7_3;
case V_1_7_2_ID:
@ -545,7 +561,7 @@ public class Version {
}
String[] parts = version.split("\\.|\\-");
if (parts.length < 3 || parts.length > 4) {
throw new IllegalArgumentException("the version needs to contain major, minor and revision, and optionally the build: " + version);
throw new IllegalArgumentException("the version needs to contain major, minor, and revision, and optionally the build: " + version);
}
try {
@ -653,7 +669,7 @@ public class Version {
@SuppressForbidden(reason = "System.out.*")
public static void main(String[] args) {
System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.hashShort() + "/" + Build.CURRENT.timestamp() + ", JVM: " + JvmInfo.jvmInfo().version());
System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + ", JVM: " + JvmInfo.jvmInfo().version());
}
@Override

View File

@ -37,10 +37,6 @@ public interface ActionFuture<T> extends Future<T> {
* Similar to {@link #get()}, just catching the {@link InterruptedException} and throwing
* an {@link IllegalStateException} instead. Also catches
* {@link java.util.concurrent.ExecutionException} and throws the actual cause instead.
* <p>
* Note, the actual cause is unwrapped to the actual failure (for example, unwrapped
* from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is
* still accessible using {@link #getRootFailure()}.
*/
T actionGet();
@ -48,10 +44,6 @@ public interface ActionFuture<T> extends Future<T> {
* Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing
* an {@link IllegalStateException} instead. Also catches
* {@link java.util.concurrent.ExecutionException} and throws the actual cause instead.
* <p>
* Note, the actual cause is unwrapped to the actual failure (for example, unwrapped
* from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is
* still accessible using {@link #getRootFailure()}.
*/
T actionGet(String timeout);
@ -59,10 +51,6 @@ public interface ActionFuture<T> extends Future<T> {
* Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing
* an {@link IllegalStateException} instead. Also catches
* {@link java.util.concurrent.ExecutionException} and throws the actual cause instead.
* <p>
* Note, the actual cause is unwrapped to the actual failure (for example, unwrapped
* from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is
* still accessible using {@link #getRootFailure()}.
*
* @param timeoutMillis Timeout in millis
*/
@ -72,10 +60,6 @@ public interface ActionFuture<T> extends Future<T> {
* Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing
* an {@link IllegalStateException} instead. Also catches
* {@link java.util.concurrent.ExecutionException} and throws the actual cause instead.
* <p>
* Note, the actual cause is unwrapped to the actual failure (for example, unwrapped
* from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is
* still accessible using {@link #getRootFailure()}.
*/
T actionGet(long timeout, TimeUnit unit);
@ -83,16 +67,6 @@ public interface ActionFuture<T> extends Future<T> {
* Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing
* an {@link IllegalStateException} instead. Also catches
* {@link java.util.concurrent.ExecutionException} and throws the actual cause instead.
* <p>
* Note, the actual cause is unwrapped to the actual failure (for example, unwrapped
* from {@link org.elasticsearch.transport.RemoteTransportException}. The root failure is
* still accessible using {@link #getRootFailure()}.
*/
T actionGet(TimeValue timeout);
/**
* The root (possibly) wrapped failure.
*/
@Nullable
Throwable getRootFailure();
}

View File

@ -80,6 +80,8 @@ import org.elasticsearch.action.admin.indices.exists.types.TransportTypesExistsA
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction;
import org.elasticsearch.action.admin.indices.flush.FlushAction;
import org.elasticsearch.action.admin.indices.flush.TransportFlushAction;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
import org.elasticsearch.action.admin.indices.forcemerge.TransportForceMergeAction;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.action.admin.indices.get.TransportGetIndexAction;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction;
@ -91,8 +93,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction;
import org.elasticsearch.action.admin.indices.open.OpenIndexAction;
import org.elasticsearch.action.admin.indices.open.TransportOpenIndexAction;
import org.elasticsearch.action.admin.indices.optimize.OptimizeAction;
import org.elasticsearch.action.admin.indices.optimize.TransportOptimizeAction;
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
import org.elasticsearch.action.admin.indices.recovery.TransportRecoveryAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
@ -134,8 +134,6 @@ import org.elasticsearch.action.bulk.TransportBulkAction;
import org.elasticsearch.action.bulk.TransportShardBulkAction;
import org.elasticsearch.action.delete.DeleteAction;
import org.elasticsearch.action.delete.TransportDeleteAction;
import org.elasticsearch.action.exists.ExistsAction;
import org.elasticsearch.action.exists.TransportExistsAction;
import org.elasticsearch.action.explain.ExplainAction;
import org.elasticsearch.action.explain.TransportExplainAction;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
@ -295,7 +293,7 @@ public class ActionModule extends AbstractModule {
registerAction(ValidateQueryAction.INSTANCE, TransportValidateQueryAction.class);
registerAction(RefreshAction.INSTANCE, TransportRefreshAction.class);
registerAction(FlushAction.INSTANCE, TransportFlushAction.class);
registerAction(OptimizeAction.INSTANCE, TransportOptimizeAction.class);
registerAction(ForceMergeAction.INSTANCE, TransportForceMergeAction.class);
registerAction(UpgradeAction.INSTANCE, TransportUpgradeAction.class);
registerAction(UpgradeStatusAction.INSTANCE, TransportUpgradeStatusAction.class);
registerAction(UpgradeSettingsAction.INSTANCE, TransportUpgradeSettingsAction.class);
@ -314,7 +312,6 @@ public class ActionModule extends AbstractModule {
registerAction(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class,
TransportShardMultiTermsVectorAction.class);
registerAction(DeleteAction.INSTANCE, TransportDeleteAction.class);
registerAction(ExistsAction.INSTANCE, TransportExistsAction.class);
registerAction(SuggestAction.INSTANCE, TransportSuggestAction.class);
registerAction(UpdateAction.INSTANCE, TransportUpdateAction.class);
registerAction(MultiGetAction.INSTANCE, TransportMultiGetAction.class,

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
* A base class for the response of a write operation that involves a single doc
*/
public abstract class DocWriteResponse extends ReplicationResponse implements StatusToXContent {
private ShardId shardId;
private String id;
private String type;
private long version;
public DocWriteResponse(ShardId shardId, String type, String id, long version) {
this.shardId = shardId;
this.type = type;
this.id = id;
this.version = version;
}
// needed for deserialization
protected DocWriteResponse() {
}
/**
* The index the document was changed in.
*/
public String getIndex() {
return this.shardId.getIndex();
}
/**
* The exact shard the document was changed in.
*/
public ShardId getShardId() {
return this.shardId;
}
/**
* The type of the document changed.
*/
public String getType() {
return this.type;
}
/**
* The id of the document changed.
*/
public String getId() {
return this.id;
}
/**
* Returns the current version of the doc.
*/
public long getVersion() {
return this.version;
}
/** returns the rest status for this response (based on {@link ShardInfo#status()}) */
public RestStatus status() {
return getShardInfo().status();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardId = ShardId.readShardId(in);
type = in.readString();
id = in.readString();
version = in.readZLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
shardId.writeTo(out);
out.writeString(type);
out.writeString(id);
out.writeZLong(version);
}
static final class Fields {
static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _ID = new XContentBuilderString("_id");
static final XContentBuilderString _VERSION = new XContentBuilderString("_version");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
ReplicationResponse.ShardInfo shardInfo = getShardInfo();
builder.field(Fields._INDEX, shardId.getIndex())
.field(Fields._TYPE, type)
.field(Fields._ID, id)
.field(Fields._VERSION, version);
shardInfo.toXContent(builder, params);
return builder;
}
}
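A hedged sketch of a minimal concrete subclass (the class name is hypothetical, not part of this change):
public class ExampleWriteResponse extends DocWriteResponse {
    ExampleWriteResponse() {} // no-arg constructor needed for deserialization via readFrom
    public ExampleWriteResponse(ShardId shardId, String type, String id, long version) {
        super(shardId, type, id, version);
    }
}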

View File

@ -21,7 +21,6 @@ package org.elasticsearch.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.bootstrap.Elasticsearch;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -30,25 +29,23 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Collections;
/**
* Base class for write action responses.
*/
public class ActionWriteResponse extends ActionResponse {
public class ReplicationResponse extends ActionResponse {
public final static ActionWriteResponse.ShardInfo.Failure[] EMPTY = new ActionWriteResponse.ShardInfo.Failure[0];
public final static ReplicationResponse.ShardInfo.Failure[] EMPTY = new ReplicationResponse.ShardInfo.Failure[0];
private ShardInfo shardInfo;
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardInfo = ActionWriteResponse.ShardInfo.readShardInfo(in);
shardInfo = ReplicationResponse.ShardInfo.readShardInfo(in);
}
@Override

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;

View File

@ -21,6 +21,7 @@ package org.elasticsearch.action.admin.cluster.health;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.unit.TimeValue;

View File

@ -22,10 +22,9 @@ package org.elasticsearch.action.admin.cluster.health;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTableValidation;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.health.ClusterStateHealth;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.health.ClusterIndexHealth;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
@ -36,38 +35,22 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.action.admin.cluster.health.ClusterIndexHealth.readClusterIndexHealth;
/**
*
*/
public class ClusterHealthResponse extends ActionResponse implements Iterable<ClusterIndexHealth>, StatusToXContent {
public class ClusterHealthResponse extends ActionResponse implements StatusToXContent {
private String clusterName;
int numberOfNodes = 0;
int numberOfDataNodes = 0;
int activeShards = 0;
int relocatingShards = 0;
int activePrimaryShards = 0;
int initializingShards = 0;
int unassignedShards = 0;
int numberOfPendingTasks = 0;
int numberOfInFlightFetch = 0;
int delayedUnassignedShards = 0;
TimeValue taskMaxWaitingTime = TimeValue.timeValueMillis(0);
double activeShardsPercent = 100;
boolean timedOut = false;
ClusterHealthStatus status = ClusterHealthStatus.RED;
private List<String> validationFailures;
Map<String, ClusterIndexHealth> indices = new HashMap<>();
private int numberOfPendingTasks = 0;
private int numberOfInFlightFetch = 0;
private int delayedUnassignedShards = 0;
private TimeValue taskMaxWaitingTime = TimeValue.timeValueMillis(0);
private boolean timedOut = false;
private ClusterStateHealth clusterStateHealth;
private ClusterHealthStatus clusterHealthStatus;
ClusterHealthResponse() {
}
@ -87,107 +70,53 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
this.numberOfPendingTasks = numberOfPendingTasks;
this.numberOfInFlightFetch = numberOfInFlightFetch;
this.taskMaxWaitingTime = taskMaxWaitingTime;
-RoutingTableValidation validation = clusterState.routingTable().validate(clusterState.metaData());
-validationFailures = validation.failures();
-numberOfNodes = clusterState.nodes().size();
-numberOfDataNodes = clusterState.nodes().dataNodes().size();
-for (String index : concreteIndices) {
-IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(index);
-IndexMetaData indexMetaData = clusterState.metaData().index(index);
-if (indexRoutingTable == null) {
-continue;
-}
-ClusterIndexHealth indexHealth = new ClusterIndexHealth(indexMetaData, indexRoutingTable);
-indices.put(indexHealth.getIndex(), indexHealth);
-}
-status = ClusterHealthStatus.GREEN;
-for (ClusterIndexHealth indexHealth : indices.values()) {
-activePrimaryShards += indexHealth.getActivePrimaryShards();
-activeShards += indexHealth.getActiveShards();
-relocatingShards += indexHealth.getRelocatingShards();
-initializingShards += indexHealth.getInitializingShards();
-unassignedShards += indexHealth.getUnassignedShards();
-if (indexHealth.getStatus() == ClusterHealthStatus.RED) {
-status = ClusterHealthStatus.RED;
-} else if (indexHealth.getStatus() == ClusterHealthStatus.YELLOW && status != ClusterHealthStatus.RED) {
-status = ClusterHealthStatus.YELLOW;
-}
-}
-if (!validationFailures.isEmpty()) {
-status = ClusterHealthStatus.RED;
-} else if (clusterState.blocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE)) {
-status = ClusterHealthStatus.RED;
-}
-// shortcut on green
-if (status.equals(ClusterHealthStatus.GREEN)) {
-this.activeShardsPercent = 100;
-} else {
-List<ShardRouting> shardRoutings = clusterState.getRoutingTable().allShards();
-int activeShardCount = 0;
-int totalShardCount = 0;
-for (ShardRouting shardRouting : shardRoutings) {
-if (shardRouting.active()) activeShardCount++;
-totalShardCount++;
-}
-this.activeShardsPercent = (((double) activeShardCount) / totalShardCount) * 100;
-}
+this.clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices);
+this.clusterHealthStatus = clusterStateHealth.getStatus();
}
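The removed block above folded index-health aggregation and the active-shard percentage into the response constructor; both now live in ClusterStateHealth. The percentage arithmetic itself, as a minimal standalone sketch (class and method names here are illustrative stand-ins, not the actual Elasticsearch API):

    class ShardMath {
        static double activeShardsPercent(int activeShardCount, int totalShardCount) {
            if (totalShardCount == 0) {
                // zero-shard guard is an assumption of this sketch; the original
                // instead shortcuts to 100 when the cluster status is GREEN
                return 100.0;
            }
            return (((double) activeShardCount) / totalShardCount) * 100;
        }

        public static void main(String[] args) {
            System.out.println(activeShardsPercent(8, 10)); // prints 80.0
        }
    }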
public String getClusterName() {
return clusterName;
}
+//package private for testing
+ClusterStateHealth getClusterStateHealth() {
+return clusterStateHealth;
+}
/**
* The validation failures on the cluster level (without index validation failures).
*/
public List<String> getValidationFailures() {
-return this.validationFailures;
+return clusterStateHealth.getValidationFailures();
}
/**
* All the validation failures, including index level validation failures.
*/
public List<String> getAllValidationFailures() {
List<String> allFailures = new ArrayList<>(getValidationFailures());
for (ClusterIndexHealth indexHealth : indices.values()) {
allFailures.addAll(indexHealth.getValidationFailures());
}
return allFailures;
}
public int getActiveShards() {
-return activeShards;
+return clusterStateHealth.getActiveShards();
}
public int getRelocatingShards() {
-return relocatingShards;
+return clusterStateHealth.getRelocatingShards();
}
public int getActivePrimaryShards() {
-return activePrimaryShards;
+return clusterStateHealth.getActivePrimaryShards();
}
public int getInitializingShards() {
-return initializingShards;
+return clusterStateHealth.getInitializingShards();
}
public int getUnassignedShards() {
-return unassignedShards;
+return clusterStateHealth.getUnassignedShards();
}
public int getNumberOfNodes() {
-return this.numberOfNodes;
+return clusterStateHealth.getNumberOfNodes();
}
public int getNumberOfDataNodes() {
-return this.numberOfDataNodes;
+return clusterStateHealth.getNumberOfDataNodes();
}
public int getNumberOfPendingTasks() {
@@ -214,12 +143,28 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
return this.timedOut;
}
+public void setTimedOut(boolean timedOut) {
+this.timedOut = timedOut;
+}
public ClusterHealthStatus getStatus() {
-return status;
+return clusterHealthStatus;
}
+/**
+* Allows to explicitly override the derived cluster health status.
+*
+* @param status The override status. Must not be null.
+*/
+public void setStatus(ClusterHealthStatus status) {
+if (status == null) {
+throw new IllegalArgumentException("'status' must not be null");
+}
+this.clusterHealthStatus = status;
+}
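This new override hook is what lets the transport action force the status to RED when a requested index is missing (see the setStatus(ClusterHealthStatus.RED) calls further down in this diff), instead of poking a package-private field directly.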
public Map<String, ClusterIndexHealth> getIndices() {
-return indices;
+return clusterStateHealth.getIndices();
}
/**
@@ -234,15 +179,9 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
* The percentage of active shards, should be 100% in a green system
*/
public double getActiveShardsPercent() {
-return activeShardsPercent;
+return clusterStateHealth.getActiveShardsPercent();
}
-@Override
-public Iterator<ClusterIndexHealth> iterator() {
-return indices.values().iterator();
-}
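With Iterable<ClusterIndexHealth> dropped from the class declaration and iterator() removed here, callers that previously wrote for (ClusterIndexHealth h : response) presumably iterate response.getIndices().values() instead, as the toXContent hunks further down already do.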
public static ClusterHealthResponse readResponseFrom(StreamInput in) throws IOException {
ClusterHealthResponse response = new ClusterHealthResponse();
response.readFrom(in);
@@ -253,36 +192,14 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = in.readString();
-activePrimaryShards = in.readVInt();
-activeShards = in.readVInt();
-relocatingShards = in.readVInt();
-initializingShards = in.readVInt();
-unassignedShards = in.readVInt();
-numberOfNodes = in.readVInt();
-numberOfDataNodes = in.readVInt();
+clusterHealthStatus = ClusterHealthStatus.fromValue(in.readByte());
+clusterStateHealth = ClusterStateHealth.readClusterHealth(in);
numberOfPendingTasks = in.readInt();
-status = ClusterHealthStatus.fromValue(in.readByte());
-int size = in.readVInt();
-for (int i = 0; i < size; i++) {
-ClusterIndexHealth indexHealth = readClusterIndexHealth(in);
-indices.put(indexHealth.getIndex(), indexHealth);
-}
timedOut = in.readBoolean();
-size = in.readVInt();
-if (size == 0) {
-validationFailures = Collections.emptyList();
-} else {
-for (int i = 0; i < size; i++) {
-validationFailures.add(in.readString());
-}
-}
numberOfInFlightFetch = in.readInt();
-if (in.getVersion().onOrAfter(Version.V_1_7_0)) {
delayedUnassignedShards = in.readInt();
-}
-activeShardsPercent = in.readDouble();
taskMaxWaitingTime = TimeValue.readTimeValue(in);
}
@@ -290,31 +207,14 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(clusterName);
-out.writeVInt(activePrimaryShards);
-out.writeVInt(activeShards);
-out.writeVInt(relocatingShards);
-out.writeVInt(initializingShards);
-out.writeVInt(unassignedShards);
-out.writeVInt(numberOfNodes);
-out.writeVInt(numberOfDataNodes);
+out.writeByte(clusterHealthStatus.value());
+clusterStateHealth.writeTo(out);
out.writeInt(numberOfPendingTasks);
-out.writeByte(status.value());
-out.writeVInt(indices.size());
-for (ClusterIndexHealth indexHealth : this) {
-indexHealth.writeTo(out);
-}
out.writeBoolean(timedOut);
-out.writeVInt(validationFailures.size());
-for (String failure : validationFailures) {
-out.writeString(failure);
-}
out.writeInt(numberOfInFlightFetch);
-if (out.getVersion().onOrAfter(Version.V_1_7_0)) {
out.writeInt(delayedUnassignedShards);
-}
-out.writeDouble(activeShardsPercent);
taskMaxWaitingTime.writeTo(out);
}
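Both stream methods now treat the nested health object as a single wire unit instead of writing each counter by hand. A standalone sketch of that composed read/write pattern, using plain java.io streams as stand-ins for Elasticsearch's StreamInput/StreamOutput (names simplified, not the actual API):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutput;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class HealthSketch {
        int activeShards;

        void writeTo(DataOutput out) throws IOException { out.writeInt(activeShards); }
        void readFrom(DataInput in) throws IOException { activeShards = in.readInt(); }
    }

    class ResponseSketch {
        HealthSketch health = new HealthSketch();

        // the response delegates its whole health section to the nested object
        void writeTo(DataOutput out) throws IOException { health.writeTo(out); }

        void readFrom(DataInput in) throws IOException {
            health = new HealthSketch();
            health.readFrom(in);
        }

        public static void main(String[] args) throws IOException {
            ResponseSketch original = new ResponseSketch();
            original.health.activeShards = 7;
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            original.writeTo(new DataOutputStream(bytes));

            ResponseSketch copy = new ResponseSketch();
            copy.readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(copy.health.activeShards); // prints 7
        }
    }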
@@ -389,7 +289,7 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
// if we don't print index level information, still print the index validation failures
// so we know why the status is red
if (!outputIndices) {
-for (ClusterIndexHealth indexHealth : indices.values()) {
+for (ClusterIndexHealth indexHealth : clusterStateHealth.getIndices().values()) {
builder.startObject(indexHealth.getIndex());
if (!indexHealth.getValidationFailures().isEmpty()) {
@@ -408,7 +308,7 @@ public class ClusterHealthResponse extends ActionResponse implements Iterable<Cl
if (outputIndices) {
builder.startObject(Fields.INDICES);
-for (ClusterIndexHealth indexHealth : indices.values()) {
+for (ClusterIndexHealth indexHealth : clusterStateHealth.getIndices().values()) {
builder.startObject(indexHealth.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
indexHealth.toXContent(builder, params);
builder.endObject();


@@ -25,6 +25,7 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.block.ClusterBlockException;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.Strings;
@@ -73,7 +74,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
protected void masterOperation(final ClusterHealthRequest request, final ClusterState unusedState, final ActionListener<ClusterHealthResponse> listener) {
if (request.waitForEvents() != null) {
final long endTimeMS = TimeValue.nsecToMSec(System.nanoTime()) + request.timeout().millis();
-clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", request.waitForEvents(), new ClusterStateUpdateTask() {
+clusterService.submitStateUpdateTask("cluster_health (wait_for_events [" + request.waitForEvents() + "])", new ClusterStateUpdateTask(request.waitForEvents()) {
@Override
public ClusterState execute(ClusterState currentState) {
return currentState;
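The call-site change above moves the task's priority out of submitStateUpdateTask's parameter list and into the ClusterStateUpdateTask constructor, so the priority travels with the task object itself. A minimal sketch of that API shape, with simplified stand-in names (not the actual Elasticsearch classes):

    enum Priority { NORMAL, URGENT }

    abstract class UpdateTaskSketch {
        final Priority priority;

        UpdateTaskSketch(Priority priority) {
            this.priority = priority; // carried by the task, not passed alongside it
        }

        abstract void execute();
    }

    class ServiceSketch {
        // no separate priority parameter: the queue reads it off the task
        void submitStateUpdateTask(String source, UpdateTaskSketch task) {
            System.out.println(source + " queued at priority " + task.priority);
            task.execute();
        }

        public static void main(String[] args) {
            new ServiceSketch().submitStateUpdateTask("cluster_health", new UpdateTaskSketch(Priority.URGENT) {
                @Override
                void execute() {
                    System.out.println("running");
                }
            });
        }
    }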
@@ -184,7 +185,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
// if the state is sufficient for what we were waiting for, we don't need to mark this as timedOut.
// We may have spent so much time waiting for events that we already reached a valid state;
// this should not mark the request as timed out
-response.timedOut = timedOut && valid == false;
+response.setTimedOut(timedOut && valid == false);
return response;
}
@@ -204,7 +205,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
indexNameExpressionResolver.concreteIndices(clusterState, IndicesOptions.strictExpand(), request.indices());
waitForCounter++;
} catch (IndexNotFoundException e) {
-response.status = ClusterHealthStatus.RED; // no indices, make sure its RED
+response.setStatus(ClusterHealthStatus.RED); // no indices, make sure its RED
// missing indices, wait a bit more...
}
}
@@ -272,13 +273,13 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
} catch (IndexNotFoundException e) {
// one of the specified indices is not there - treat it as RED.
ClusterHealthResponse response = new ClusterHealthResponse(clusterName.value(), Strings.EMPTY_ARRAY, clusterState,
-numberOfPendingTasks, numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(), settings, clusterState),
+numberOfPendingTasks, numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState),
pendingTaskTimeInQueue);
-response.status = ClusterHealthStatus.RED;
+response.setStatus(ClusterHealthStatus.RED);
return response;
}
return new ClusterHealthResponse(clusterName.value(), concreteIndices, clusterState, numberOfPendingTasks,
-numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(), settings, clusterState), pendingTaskTimeInQueue);
+numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), pendingTaskTimeInQueue);
}
}


@@ -72,14 +72,14 @@ public class NodeInfo extends BaseNodeResponse {
private HttpInfo http;
@Nullable
-private PluginsInfo plugins;
+private PluginsAndModules plugins;
NodeInfo() {
}
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
-@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsInfo plugins) {
+@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) {
super(node);
this.version = version;
this.build = build;
@@ -172,7 +172,7 @@ public class NodeInfo extends BaseNodeResponse {
}
@Nullable
-public PluginsInfo getPlugins() {
+public PluginsAndModules getPlugins() {
return this.plugins;
}
@@ -217,7 +217,8 @@ public class NodeInfo extends BaseNodeResponse {
http = HttpInfo.readHttpInfo(in);
}
if (in.readBoolean()) {
-plugins = PluginsInfo.readPluginsInfo(in);
+plugins = new PluginsAndModules();
+plugins.readFrom(in);
}
}
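The readFrom change above also switches from a static factory (PluginsInfo.readPluginsInfo) to construct-then-fill (new PluginsAndModules() followed by readFrom), inside the usual boolean presence flag that guards a @Nullable section on the wire. A standalone sketch of that pattern, again with plain java.io stand-ins rather than the real StreamInput/StreamOutput, and an illustrative plugin name:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutput;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class PluginsSectionSketch {
        String name = "";

        void readFrom(DataInput in) throws IOException { name = in.readUTF(); }
        void writeTo(DataOutput out) throws IOException { out.writeUTF(name); }
    }

    class NodeInfoSketch {
        PluginsSectionSketch plugins; // nullable section

        void writeTo(DataOutput out) throws IOException {
            out.writeBoolean(plugins != null); // presence flag first
            if (plugins != null) {
                plugins.writeTo(out);
            }
        }

        void readFrom(DataInput in) throws IOException {
            if (in.readBoolean()) {
                plugins = new PluginsSectionSketch(); // construct, then fill from the stream
                plugins.readFrom(in);
            }
        }

        public static void main(String[] args) throws IOException {
            NodeInfoSketch with = new NodeInfoSketch();
            with.plugins = new PluginsSectionSketch();
            with.plugins.name = "analysis-icu";
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            with.writeTo(new DataOutputStream(bytes));

            NodeInfoSketch read = new NodeInfoSketch();
            read.readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(read.plugins != null ? read.plugins.name : "no plugins section");
        }
    }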


@@ -77,7 +77,7 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
builder.field("ip", nodeInfo.getNode().getHostAddress(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("version", nodeInfo.getVersion());
builder.field("build", nodeInfo.getBuild().hashShort());
builder.field("build_hash", nodeInfo.getBuild().shortHash());
if (nodeInfo.getServiceAttributes() != null) {
for (Map.Entry<String, String> nodeAttribute : nodeInfo.getServiceAttributes().entrySet()) {

Some files were not shown because too many files have changed in this diff.